Example #1
class PyOpencvPython(PythonPackage):
    """Pre-built CPU-only OpenCV packages for Python."""

    homepage = "https://pypi.org/project/opencv-python"
    url = "https://files.pythonhosted.org/packages/01/9b/be08992293fb21faf35ab98e06924d7407fcfca89d89c5de65442631556a/opencv-python-4.5.3.56.tar.gz"

    version('4.5.3.56',
            sha256='3c001d3feec7f3140f1fb78dfc52ca28122db8240826882d175a208a89d2731b')

    depends_on('[email protected]:', type=('build', 'run'))
    depends_on('cmake', type='build')
    depends_on('py-numpy', type='build')
    depends_on('py-setuptools', type='build')
    depends_on('py-scikit-build', type='build')
Example #2
class HipPackage(PackageBase):
    """Auxiliary class which contains HIP variant, dependencies and conflicts
    and is meant to unify and facilitate its usage. Closely mimics CudaPackage.

    Maintainers: dtaller
    """

    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
        'gfx900', 'gfx906', 'gfx908', 'gfx1010',
        'gfx1011', 'gfx1012', 'none'
    )

    variant('hip', default=False, description='Enable HIP support')

    # possible amd gpu targets for hip builds
    variant('amdgpu_target', default='none', values=amdgpu_targets)

    depends_on('llvm-amdgpu', when='+hip')
    depends_on('hsa-rocr-dev', when='+hip')
    depends_on('hip', when='+hip')

    # need amd gpu type for hip builds
    conflicts('amdgpu_target=none', when='+hip')

    # Make sure non-'none' amdgpu_targets cannot be used without +hip
    for value in amdgpu_targets[:-1]:
        conflicts('~hip', when='amdgpu_target=' + value)

    # https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
    # It seems that hip-clang does not (yet?) accept this flag, in which case
    # we will still need to set the HCC_AMDGPU_TARGET environment flag in the
    # hip package file. But I will leave this here for future development.
    @staticmethod
    def hip_flags(amdgpu_target):
        return '--amdgpu-target={0}'.format(amdgpu_target)

    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures (not including 'none' option)
    @staticmethod
    def amd_gputargets_list():
        return (
            'gfx701', 'gfx801', 'gfx802', 'gfx803',
            'gfx900', 'gfx906', 'gfx908', 'gfx1010',
            'gfx1011', 'gfx1012'
        )
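
A minimal sketch of how a package might consume this mix-in (the package name, URL, and the CMake flag wiring below are hypothetical, not taken from any real Spack repository; a real package forwards the flag into whatever its build system expects):

class HypotheticalHipSolver(CMakePackage, HipPackage):
    """Hypothetical solver illustrating the HipPackage mix-in."""

    homepage = "https://example.com/hip-solver"
    url = "https://example.com/hip-solver-1.0.tar.gz"

    version('1.0')

    def cmake_args(self):
        args = []
        if '+hip' in self.spec:
            # amdgpu_target is single-valued here, e.g. 'gfx906'
            target = self.spec.variants['amdgpu_target'].value
            args.append('-DCMAKE_CXX_FLAGS=' + self.hip_flags(target))
        return args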
Example #3
class RPackage(PackageBase):
    """Specialized class for packages that are built using R

    This class provides a single phase that can be overridden:

        1. :py:meth:`~.RPackage.install`

    It has sensible defaults, and for many packages the only thing
    necessary will be to add dependencies
    """
    phases = ['install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'RPackage'

    extends('r')

    depends_on('r', type=('build', 'run'))

    def install(self, spec, prefix):
        """Installs an R package."""
        inspect.getmodule(self).R(
            'CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
            self.stage.source_path)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
Example #4
class ClangTools(Package):
    """Copy binaries like clang-format out of an LLVM installation and into a
    clean directory for which a module can safely be generated."""

    has_code = False
    homepage = LLVM.homepage

    # Add a clang-format version for every LLVM version
    for llvm_ver in LLVM.versions:
        clang_tools_ver = Version(str(llvm_ver) + "p2")
        version(clang_tools_ver)
        depends_on(
            "llvm@{}".format(llvm_ver), when="@{}".format(clang_tools_ver), type="build"
        )

    def install(self, spec, prefix):
        for utility in (
            ("bin", "clang-format"),
            ("bin", "clang-tidy"),
            ("bin", "git-clang-format"),
            ("share", "clang", "clang-format-diff.py"),
        ):
            source_dir = spec["llvm"].prefix
            destination_dir = spec.prefix
            for component in utility[:-1]:
                source_dir = source_dir.join(component)
                destination_dir = destination_dir.join(component)
            mkdirp(destination_dir)
            install(source_dir.join(utility[-1]), destination_dir.join(utility[-1]))
Example #5
File: maven.py  Project: eic/spack
class MavenPackage(PackageBase):
    """Specialized class for packages that are built using the
    Maven build system. See https://maven.apache.org/index.html
    for more information.

    This class provides the following phases that can be overridden:

    * build
    * install
    """
    # Default phases
    phases = ['build', 'install']

    # To be used in UI queries that require to know which
    # build-system class we are using
    build_system_class = 'MavenPackage'

    depends_on('java', type=('build', 'run'))
    depends_on('maven', type='build')

    @property
    def build_directory(self):
        """The directory containing the ``pom.xml`` file."""
        return self.stage.source_path

    def build_args(self):
        """List of args to pass to build phase."""
        return []

    def build(self, spec, prefix):
        """Compile code and package into a JAR file."""

        with working_dir(self.build_directory):
            mvn = which('mvn')
            if self.run_tests:
                mvn('verify', *self.build_args())
            else:
                mvn('package', '-DskipTests', *self.build_args())

    def install(self, spec, prefix):
        """Copy to installation prefix."""

        with working_dir(self.build_directory):
            install_tree('.', prefix)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
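
As a usage illustration, a hypothetical Maven-built package usually needs nothing beyond metadata, plus build_args when extra Maven options are wanted (all names and URLs below are made up):

class HypotheticalJavaTool(MavenPackage):
    """Hypothetical package built with Maven."""

    homepage = "https://example.com/java-tool"
    url = "https://example.com/java-tool-1.2.3.tar.gz"

    version('1.2.3')

    def build_args(self):
        # Extra options appended to the mvn package/verify invocation
        return ['-Dmaven.javadoc.skip=true']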
Example #6
class RPackage(PackageBase):
    """Specialized class for packages that are built using R.

    For more information on the R build system, see:
    https://stat.ethz.ch/R-manual/R-devel/library/utils/html/INSTALL.html

    This class provides a single phase that can be overridden:

        1. :py:meth:`~.RPackage.install`

    It has sensible defaults, and for many packages the only thing
    necessary will be to add dependencies
    """
    phases = ['install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'RPackage'

    extends('r')

    depends_on('r', type=('build', 'run'))

    def configure_args(self):
        """Arguments to pass to install via ``--configure-args``."""
        return []

    def configure_vars(self):
        """Arguments to pass to install via ``--configure-vars``."""
        return []

    def install(self, spec, prefix):
        """Installs an R package."""

        config_args = self.configure_args()
        config_vars = self.configure_vars()

        args = [
            'CMD',
            'INSTALL'
        ]

        if config_args:
            args.append('--configure-args={0}'.format(' '.join(config_args)))

        if config_vars:
            args.append('--configure-vars={0}'.format(' '.join(config_vars)))

        args.extend([
            '--library={0}'.format(self.module.r_lib_dir),
            self.stage.source_path
        ])

        inspect.getmodule(self).R(*args)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
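
A subclass then reduces to metadata and dependencies; configure_args is only overridden when the R package's own configure script needs hints. Everything in this sketch (names, URLs, the --with-gsl-prefix option) is hypothetical:

class RHypotheticalpkg(RPackage):
    """Hypothetical CRAN-style package."""

    homepage = "https://cloud.r-project.org/package=hypotheticalpkg"
    url = "https://cloud.r-project.org/src/contrib/hypotheticalpkg_1.0.0.tar.gz"

    version('1.0.0')

    depends_on('r-rcpp', type=('build', 'run'))
    depends_on('gsl')

    def configure_args(self):
        # Forwarded to R CMD INSTALL via --configure-args
        return ['--with-gsl-prefix={0}'.format(self.spec['gsl'].prefix)]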
Example #7
class ROCmPackage(PackageBase):
    """Auxiliary class which contains ROCm variant, dependencies and conflicts
    and is meant to unify and facilitate its usage. Closely mimics CudaPackage.

    Maintainers: dtaller
    """

    # https://llvm.org/docs/AMDGPUUsage.html
    # Possible architectures
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
        'gfx1011', 'gfx1012'
    )

    variant('rocm', default=False, description='Enable ROCm support')

    # possible amd gpu targets for rocm builds
    variant('amdgpu_target',
            description='AMD GPU architecture',
            values=spack.variant.any_combination_of(*amdgpu_targets),
            when='+rocm')

    depends_on('llvm-amdgpu', when='+rocm')
    depends_on('hsa-rocr-dev', when='+rocm')
    depends_on('hip', when='+rocm')

    conflicts('^blt@:0.3.6', when='+rocm')

    # need amd gpu type for rocm builds
    conflicts('amdgpu_target=none', when='+rocm')

    # Make sure amdgpu_targets cannot be used without +rocm
    for value in amdgpu_targets:
        conflicts('~rocm', when='amdgpu_target=' + value)

    # https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
    # It seems that hip-clang does not (yet?) accept this flag, in which case
    # we will still need to set the HCC_AMDGPU_TARGET environment flag in the
    # hip package file. But I will leave this here for future development.
    @staticmethod
    def hip_flags(amdgpu_target):
        archs = ",".join(amdgpu_target)
        return '--amdgpu-target={0}'.format(archs)
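
Because amdgpu_target is multi-valued here, hip_flags joins the selected architectures into a single --amdgpu-target option. A sketch of a consuming package might look like the following; the package name, URL, and the HIP_HIPCC_FLAGS variable are illustrative assumptions, not part of the mix-in:

class HypotheticalRocmApp(CMakePackage, ROCmPackage):
    """Hypothetical application using the ROCmPackage mix-in."""

    homepage = "https://example.com/rocm-app"
    url = "https://example.com/rocm-app-2.0.tar.gz"

    version('2.0')

    def cmake_args(self):
        args = [self.define_from_variant('ENABLE_HIP', 'rocm')]
        if '+rocm' in self.spec:
            targets = self.spec.variants['amdgpu_target'].value
            # e.g. '--amdgpu-target=gfx906,gfx908'
            args.append(self.define('HIP_HIPCC_FLAGS', self.hip_flags(targets)))
        return args

A user would then request something like: spack install rocm-app +rocm amdgpu_target=gfx906,gfx908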
Example #8
class QMakePackage(PackageBase):
    """Specialized class for packages built using qmake.

    For more information on the qmake build system, see:
    http://doc.qt.io/qt-5/qmake-manual.html

    This class provides three phases that can be overridden:

    1. :py:meth:`~.QMakePackage.qmake`
    2. :py:meth:`~.QMakePackage.build`
    3. :py:meth:`~.QMakePackage.install`

    They all have sensible defaults and for many packages the only thing
    necessary will be to override :py:meth:`~.QMakePackage.qmake_args`.
    """
    #: Phases of a qmake package
    phases = ['qmake', 'build', 'install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'QMakePackage'

    #: Callback names for build-time test
    build_time_test_callbacks = ['check']

    depends_on('qt', type='build')

    def qmake_args(self):
        """Produces a list containing all the arguments that must be passed to
        qmake
        """
        return []

    def qmake(self, spec, prefix):
        """Run ``qmake`` to configure the project and generate a Makefile."""
        inspect.getmodule(self).qmake(*self.qmake_args())

    def build(self, spec, prefix):
        """Make the build targets"""
        inspect.getmodule(self).make()

    def install(self, spec, prefix):
        """Make the install targets"""
        inspect.getmodule(self).make('install')

    # Tests

    def check(self):
        """Searches the Makefile for a ``check:`` target and runs it if found.
        """
        self._if_make_target_execute('check')

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
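
For many Qt projects the subclass only supplies metadata and, when the project's .pro file honors it, a prefix setting via qmake_args (everything below is hypothetical):

class HypotheticalQtWidget(QMakePackage):
    """Hypothetical Qt-based package."""

    homepage = "https://example.com/qt-widget"
    url = "https://example.com/qt-widget-0.5.tar.gz"

    version('0.5')

    depends_on('qt@5:')

    def qmake_args(self):
        # Assumes the .pro file reads PREFIX for the install root
        return ['PREFIX={0}'.format(self.prefix)]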
Example #9
class X264(AutotoolsPackage):
    """Software library and application for encoding video streams"""

    homepage = "https://www.videolan.org/developers/x264.html"
    git = "https://code.videolan.org/videolan/x264.git"

    version("20210613", commit="5db6aa6cab1b146e07b60cc1736a01f21da01154")

    depends_on("nasm")

    def configure_args(self):
        return ["--enable-shared", "--enable-pic"]
Example #10
class CudaPackage(PackageBase):
    """Auxiliary class which contains CUDA variant, dependencies and conflicts
    and is meant to unify and facilitate its usage.
    """

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
    # Spack has depends_on(cuda, when='cuda_arch!=None') or alike
    variant('cuda', default=False,
            description='Build with CUDA')
    # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
    # https://developer.nvidia.com/cuda-gpus
    variant('cuda_arch', default=None,
            description='CUDA architecture',
            values=('20', '30', '32', '35', '50', '52', '53', '60', '61',
                    '62', '70'),
            multi=True)

    # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
    # and http://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
    @staticmethod
    def cuda_flags(arch_list):
        return [('--generate-code arch=compute_{0},code=sm_{0} '
                 '--generate-code arch=compute_{0},code=compute_{0}').format(s)
                for s in arch_list]

    depends_on("cuda@7:", when='+cuda')

    # CUDA version vs Architecture
    depends_on("cuda@8:", when='cuda_arch=60')
    depends_on("cuda@8:", when='cuda_arch=61')
    depends_on("cuda@8:", when='cuda_arch=62')
    depends_on("cuda@9:", when='cuda_arch=70')

    depends_on('cuda@:8.99', when='cuda_arch=20')

    # Compiler conflicts:
    # https://gist.github.com/ax3l/9489132
    conflicts('%gcc@5:', when='+cuda ^cuda@:7.5')
    conflicts('%gcc@6:', when='+cuda ^cuda@:8.99')
    conflicts('%gcc@7:', when='+cuda ^cuda@:9.99')
    if (platform.system() != "Darwin"):
        conflicts('%clang@:3.4,3.7:', when='+cuda ^[email protected]')
        conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8:9')
    conflicts('%intel@:14,16:', when='+cuda ^[email protected]')
    conflicts('%intel@:14,17:', when='+cuda ^[email protected]')
    conflicts('%intel@:14,18:', when='+cuda ^[email protected]:9')

    # Make sure cuda_arch can not be used without +cuda
    conflicts('~cuda', when='cuda_arch=20')
    conflicts('~cuda', when='cuda_arch=30')
    conflicts('~cuda', when='cuda_arch=32')
    conflicts('~cuda', when='cuda_arch=35')
    conflicts('~cuda', when='cuda_arch=50')
    conflicts('~cuda', when='cuda_arch=52')
    conflicts('~cuda', when='cuda_arch=53')
    conflicts('~cuda', when='cuda_arch=60')
    conflicts('~cuda', when='cuda_arch=61')
    conflicts('~cuda', when='cuda_arch=62')
    conflicts('~cuda', when='cuda_arch=70')
Example #11
class PyPlotnine(PythonPackage):
    """plotnine is an implementation of a grammar of graphics in Python, it is
    based on ggplot2. The grammar allows users to compose plots by explicitly
    mapping data to the visual objects that make up the plot."""

    pypi = "plotnine/plotnine-0.8.0.tar.gz"

    version(
        "0.8.0",
        sha256="39de59edcc28106761b65238647d0b1f6212ea7f3a78f8be0b846616db969276",
    )

    depends_on('[email protected]:', type=('build', 'run'))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    depends_on("[email protected]:", type=("build", "run"))
Example #12
class RubyPackage(PackageBase):
    """Specialized class for building Ruby gems.

    This class provides two phases that can be overridden if required:

    #. :py:meth:`~.RubyPackage.build`
    #. :py:meth:`~.RubyPackage.install`
    """
    #: Phases of a Ruby package
    phases = ['build', 'install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'RubyPackage'

    extends('ruby')

    depends_on('ruby', type=('build', 'run'))

    def build(self, spec, prefix):
        """Build a Ruby gem."""

        # ruby-rake provides both rake.gemspec and Rakefile, but only
        # rake.gemspec can be built without an existing rake installation
        gemspecs = glob.glob('*.gemspec')
        rakefiles = glob.glob('Rakefile')
        if gemspecs:
            inspect.getmodule(self).gem('build', '--norc', gemspecs[0])
        elif rakefiles:
            jobs = inspect.getmodule(self).make_jobs
            inspect.getmodule(self).rake('package', '-j{0}'.format(jobs))
        else:
            # Some Ruby packages only ship `*.gem` files, so nothing to build
            pass

    def install(self, spec, prefix):
        """Install a Ruby gem.

        The ruby package sets ``GEM_HOME`` to tell gem where to install to."""

        gems = glob.glob('*.gem')
        if gems:
            inspect.getmodule(self).gem(
                'install', '--norc', '--ignore-dependencies', gems[0])

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
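
A gem that ships a gemspec in its source tarball therefore needs nothing beyond metadata; the sketch below is hypothetical:

class RubyHypotheticalGem(RubyPackage):
    """Hypothetical Ruby gem."""

    homepage = "https://example.com/hypothetical-gem"
    url = "https://example.com/hypothetical-gem-1.4.0.tar.gz"

    version('1.4.0')

    depends_on('[email protected]:', type=('build', 'run'))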
Example #13
class OctavePackage(PackageBase):
    """Specialized class for Octave packages. See
    https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html
    for more information.

    This class provides the following phases that can be overridden:

    1. :py:meth:`~.OctavePackage.install`

    """
    # Default phases
    phases = ['install']

    # To be used in UI queries that require to know which
    # build-system class we are using
    build_system_class = 'OctavePackage'

    extends('octave')
    depends_on('octave', type=('build', 'run'))

    def setup_environment(self, spack_env, run_env):
        """Set up the compile and runtime environments for a package."""
        # octave does not like those environment variables to be set:
        spack_env.unset('CC')
        spack_env.unset('CXX')
        spack_env.unset('FC')

    def install(self, spec, prefix):
        """Install the package from the archive file"""
        inspect.getmodule(self).octave(
            '--quiet',
            '--norc',
            '--built-in-docstrings-file=/dev/null',
            '--texi-macros-file=/dev/null',
            '--eval', 'pkg prefix %s; pkg install %s' %
            (prefix, self.stage.archive_file))

    # Testing

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
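
An Octave subclass usually just adds metadata and, if needed, a tighter Octave version requirement (hypothetical example):

class OctaveHypothetical(OctavePackage):
    """Hypothetical Octave Forge-style package."""

    homepage = "https://octave.sourceforge.io/hypothetical/"
    url = "https://downloads.sourceforge.net/octave/hypothetical-1.1.0.tar.gz"

    version('1.1.0')

    depends_on('[email protected]:', type=('build', 'run'))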
Example #14
class PyMizani(PythonPackage):
    """Mizani is a scales package for graphics. It is based on Hadley Wickham's
    Scales package."""

    pypi = "mizani/mizani-0.7.3.tar.gz"

    version(
        "0.7.3",
        sha256="f521300bd29ca918fcd629bc8ab50fa04e41bdbe00f6bcf74055d3c6273770a4",
    )

    depends_on('[email protected]:', type=('build', 'run'))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
    depends_on("py-palettable", type=("build", "run"))
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
Example #15
class PyAtldld(PythonPackage):
    """Search, download, and prepare brain atlas data."""

    homepage = "atlas-download-tools.rtfd.io"
    git = "https://github.com/BlueBrain/Atlas-Download-Tools.git"

    maintainers = ["EmilieDel", "jankrepl", "Stannislav"]

    version("0.3.2", tag="v0.3.2")
    version("0.3.1", tag="v0.3.1")
    version("0.3.0", tag="v0.3.0")
    version("0.2.2", tag="v0.2.2")

    # Build dependencies
    depends_on("[email protected]:", type=("build", "run"))
    depends_on("py-setuptools", type="build")
    depends_on("py-setuptools-scm", type="build")

    depends_on("py-appdirs", when="@0.3.1:", type=("build", "run"))
    depends_on("py-click@8:", when="@0.3.0:", type=("build", "run"))
    depends_on("py-dataclasses", when="@0.3.1: ^[email protected]", type=("build", "run"))
    depends_on("py-matplotlib", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
    depends_on("py-opencv-python", type=("build", "run"))
    depends_on("py-pandas", type=("build", "run"))
    depends_on("py-pillow", when="@0.3.1:", type=("build", "run"))
    depends_on("py-requests", type=("build", "run"))
    depends_on("py-responses", type=("build", "run"))
    depends_on("py-scikit-image", type=("build", "run"))
Example #16
class PythonPackage(PackageBase):
    """Specialized class for packages that are built using Python
    setup.py files

    This class provides the following phases that can be overridden:

    * build
    * build_py
    * build_ext
    * build_clib
    * build_scripts
    * clean
    * install
    * install_lib
    * install_headers
    * install_scripts
    * install_data
    * sdist
    * register
    * bdist
    * bdist_dumb
    * bdist_rpm
    * bdist_wininst
    * upload
    * check

    These are all standard setup.py commands and can be found by running:

    .. code-block:: console

       $ python setup.py --help-commands

    By default, only the 'build' and 'install' phases are run, but if you
    need to run more phases, simply modify your ``phases`` list like so:

    .. code-block:: python

       phases = ['build_ext', 'install', 'bdist']

    Each phase provides a function <phase> that runs:

    .. code-block:: console

       $ python -s setup.py --no-user-cfg <phase>

    Each phase also has a <phase_args> function that can pass arguments to
    this call. All of these functions are empty except for the ``install_args``
    function, which passes ``--prefix=/path/to/installation/directory``.

    If you need to run a phase which is not a standard setup.py command,
    you'll need to define a function for it like so:

    .. code-block:: python

       def configure(self, spec, prefix):
           self.setup_py('configure')
    """
    # Default phases
    phases = ['build', 'install']

    # Name of modules that the Python package provides
    # This is used to test whether or not the installation succeeded
    # These names generally come from running:
    #
    # >>> import setuptools
    # >>> setuptools.find_packages()
    #
    # in the source tarball directory
    import_modules = []

    # To be used in UI queries that require to know which
    # build-system class we are using
    build_system_class = 'PythonPackage'

    #: Callback names for build-time test
    build_time_test_callbacks = ['test']

    #: Callback names for install-time test
    install_time_test_callbacks = ['import_module_test']

    extends('python')

    depends_on('python', type=('build', 'run'))

    py_namespace = None

    def setup_file(self):
        """Returns the name of the setup file to use."""
        return 'setup.py'

    @property
    def build_directory(self):
        """The directory containing the ``setup.py`` file."""
        return self.stage.source_path

    def python(self, *args, **kwargs):
        inspect.getmodule(self).python(*args, **kwargs)

    def setup_py(self, *args, **kwargs):
        setup = self.setup_file()

        with working_dir(self.build_directory):
            self.python('-s', setup, '--no-user-cfg', *args, **kwargs)

    def _setup_command_available(self, command):
        """Determines whether or not a setup.py command exists.

        Args:
            command (str): The command to look for

        Returns:
            bool: True if the command is found, else False
        """
        kwargs = {
            'output': os.devnull,
            'error': os.devnull,
            'fail_on_error': False
        }

        python = inspect.getmodule(self).python
        setup = self.setup_file()

        python('-s', setup, '--no-user-cfg', command, '--help', **kwargs)
        return python.returncode == 0

    # The following phases and their descriptions come from:
    #   $ python setup.py --help-commands

    # Standard commands

    def build(self, spec, prefix):
        """Build everything needed to install."""
        args = self.build_args(spec, prefix)

        self.setup_py('build', *args)

    def build_args(self, spec, prefix):
        """Arguments to pass to build."""
        return []

    def build_py(self, spec, prefix):
        '''"Build" pure Python modules (copy to build directory).'''
        args = self.build_py_args(spec, prefix)

        self.setup_py('build_py', *args)

    def build_py_args(self, spec, prefix):
        """Arguments to pass to build_py."""
        return []

    def build_ext(self, spec, prefix):
        """Build C/C++ extensions (compile/link to build directory)."""
        args = self.build_ext_args(spec, prefix)

        self.setup_py('build_ext', *args)

    def build_ext_args(self, spec, prefix):
        """Arguments to pass to build_ext."""
        return []

    def build_clib(self, spec, prefix):
        """Build C/C++ libraries used by Python extensions."""
        args = self.build_clib_args(spec, prefix)

        self.setup_py('build_clib', *args)

    def build_clib_args(self, spec, prefix):
        """Arguments to pass to build_clib."""
        return []

    def build_scripts(self, spec, prefix):
        '''"Build" scripts (copy and fixup #! line).'''
        args = self.build_scripts_args(spec, prefix)

        self.setup_py('build_scripts', *args)

    def build_scripts_args(self, spec, prefix):
        """Arguments to pass to build_scripts."""
        return []

    def clean(self, spec, prefix):
        """Clean up temporary files from 'build' command."""
        args = self.clean_args(spec, prefix)

        self.setup_py('clean', *args)

    def clean_args(self, spec, prefix):
        """Arguments to pass to clean."""
        return []

    def install(self, spec, prefix):
        """Install everything from build directory."""
        args = self.install_args(spec, prefix)

        self.setup_py('install', *args)

    def install_args(self, spec, prefix):
        """Arguments to pass to install."""
        args = ['--prefix={0}'.format(prefix)]

        # This option causes python packages (including setuptools) NOT
        # to create eggs or easy-install.pth files.  Instead, they
        # install naturally into $prefix/pythonX.Y/site-packages.
        #
        # Eggs add an extra level of indirection to sys.path, slowing
        # down large HPC runs.  They are also deprecated in favor of
        # wheels, which use a normal layout when installed.
        #
        # Spack manages the package directory on its own by symlinking
        # extensions into the site-packages directory, so we don't really
        # need the .pth files or egg directories, anyway.
        #
        # We need to make sure this is only for build dependencies. A package
        # such as py-basemap will not build properly with this flag since
        # it does not use setuptools to build and thus does not recognize
        # the --single-version-externally-managed flag
        if ('py-setuptools' == spec.name or  # this is setuptools, or
                'py-setuptools' in spec._dependencies
                and  # it's an immediate dep
                'build' in spec._dependencies['py-setuptools'].deptypes):
            args += ['--single-version-externally-managed', '--root=/']

        return args

    def install_lib(self, spec, prefix):
        """Install all Python modules (extensions and pure Python)."""
        args = self.install_lib_args(spec, prefix)

        self.setup_py('install_lib', *args)

    def install_lib_args(self, spec, prefix):
        """Arguments to pass to install_lib."""
        return []

    def install_headers(self, spec, prefix):
        """Install C/C++ header files."""
        args = self.install_headers_args(spec, prefix)

        self.setup_py('install_headers', *args)

    def install_headers_args(self, spec, prefix):
        """Arguments to pass to install_headers."""
        return []

    def install_scripts(self, spec, prefix):
        """Install scripts (Python or otherwise)."""
        args = self.install_scripts_args(spec, prefix)

        self.setup_py('install_scripts', *args)

    def install_scripts_args(self, spec, prefix):
        """Arguments to pass to install_scripts."""
        return []

    def install_data(self, spec, prefix):
        """Install data files."""
        args = self.install_data_args(spec, prefix)

        self.setup_py('install_data', *args)

    def install_data_args(self, spec, prefix):
        """Arguments to pass to install_data."""
        return []

    def sdist(self, spec, prefix):
        """Create a source distribution (tarball, zip file, etc.)."""
        args = self.sdist_args(spec, prefix)

        self.setup_py('sdist', *args)

    def sdist_args(self, spec, prefix):
        """Arguments to pass to sdist."""
        return []

    def register(self, spec, prefix):
        """Register the distribution with the Python package index."""
        args = self.register_args(spec, prefix)

        self.setup_py('register', *args)

    def register_args(self, spec, prefix):
        """Arguments to pass to register."""
        return []

    def bdist(self, spec, prefix):
        """Create a built (binary) distribution."""
        args = self.bdist_args(spec, prefix)

        self.setup_py('bdist', *args)

    def bdist_args(self, spec, prefix):
        """Arguments to pass to bdist."""
        return []

    def bdist_dumb(self, spec, prefix):
        '''Create a "dumb" built distribution.'''
        args = self.bdist_dumb_args(spec, prefix)

        self.setup_py('bdist_dumb', *args)

    def bdist_dumb_args(self, spec, prefix):
        """Arguments to pass to bdist_dumb."""
        return []

    def bdist_rpm(self, spec, prefix):
        """Create an RPM distribution."""
        args = self.bdist_rpm_args(spec, prefix)

        self.setup_py('bdist_rpm', *args)

    def bdist_rpm_args(self, spec, prefix):
        """Arguments to pass to bdist_rpm."""
        return []

    def bdist_wininst(self, spec, prefix):
        """Create an executable installer for MS Windows."""
        args = self.bdist_wininst_args(spec, prefix)

        self.setup_py('bdist_wininst', *args)

    def bdist_wininst_args(self, spec, prefix):
        """Arguments to pass to bdist_wininst."""
        return []

    def upload(self, spec, prefix):
        """Upload binary package to PyPI."""
        args = self.upload_args(spec, prefix)

        self.setup_py('upload', *args)

    def upload_args(self, spec, prefix):
        """Arguments to pass to upload."""
        return []

    def check(self, spec, prefix):
        """Perform some checks on the package."""
        args = self.check_args(spec, prefix)

        self.setup_py('check', *args)

    def check_args(self, spec, prefix):
        """Arguments to pass to check."""
        return []

    # Testing

    def test(self):
        """Run unit tests after in-place build.

        These tests are only run if the package actually has a 'test' command.
        """
        if self._setup_command_available('test'):
            args = self.test_args(self.spec, self.prefix)

            self.setup_py('test', *args)

    def test_args(self, spec, prefix):
        """Arguments to pass to test."""
        return []

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def import_module_test(self):
        """Attempts to import the module that was just installed.

        This test is only run if the package overrides
        :py:attr:`import_modules` with a list of module names."""

        # Make sure we are importing the installed modules,
        # not the ones in the current directory
        with working_dir('..'):
            for module in self.import_modules:
                self.python('-c', 'import {0}'.format(module))

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    def view_file_conflicts(self, view, merge_map):
        """Report all file conflicts, excepting special cases for python.
           Specifically, this does not report errors for duplicate
           __init__.py files for packages in the same namespace.
        """
        conflicts = list(dst for src, dst in merge_map.items()
                         if os.path.exists(dst))

        if conflicts and self.py_namespace:
            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
            namespaces = set(x.package.py_namespace for x in ext_map.values())
            namespace_re = (r'site-packages/{0}/__init__.py'.format(
                self.py_namespace))
            find_namespace = match_predicate(namespace_re)
            if self.py_namespace in namespaces:
                conflicts = list(x for x in conflicts if not find_namespace(x))

        return conflicts

    def add_files_to_view(self, view, merge_map):
        bin_dir = self.spec.prefix.bin
        python_prefix = self.extendee_spec.prefix
        global_view = same_path(python_prefix, view.root)
        for src, dst in merge_map.items():
            if os.path.exists(dst):
                continue
            elif global_view or not path_contains_subdirectory(src, bin_dir):
                view.link(src, dst)
            elif not os.path.islink(src):
                shutil.copy2(src, dst)
                if 'script' in get_filetype(src):
                    filter_file(python_prefix, os.path.abspath(view.root), dst)
            else:
                orig_link_target = os.path.realpath(src)
                new_link_target = os.path.abspath(merge_map[orig_link_target])
                view.link(new_link_target, dst)

    def remove_files_from_view(self, view, merge_map):
        ignore_namespace = False
        if self.py_namespace:
            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
            remaining_namespaces = set(spec.package.py_namespace
                                       for name, spec in ext_map.items()
                                       if name != self.name)
            if self.py_namespace in remaining_namespaces:
                namespace_init = match_predicate(
                    r'site-packages/{0}/__init__.py'.format(self.py_namespace))
                ignore_namespace = True

        bin_dir = self.spec.prefix.bin
        global_view = self.extendee_spec.prefix == view.root
        for src, dst in merge_map.items():
            if ignore_namespace and namespace_init(dst):
                continue

            if global_view or not path_contains_subdirectory(src, bin_dir):
                view.remove_file(src, dst)
            else:
                os.remove(dst)
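
Tying this together, a hypothetical setup.py-based package declares metadata, dependencies, and optionally import_modules so the install-time import test has something to verify (all names below are placeholders):

class PyHypothetical(PythonPackage):
    """Hypothetical setup.py-based package."""

    pypi = "hypothetical/hypothetical-1.0.0.tar.gz"

    version('1.0.0')

    # Checked by import_module_test after installation
    import_modules = ['hypothetical']

    depends_on('py-setuptools', type='build')
    depends_on('py-numpy', type=('build', 'run'))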
Example #17
class CudaPackage(PackageBase):
    """Auxiliary class which contains CUDA variant, dependencies and conflicts
    and is meant to unify and facilitate its usage.

    Maintainers: ax3l, Rombur, davidbeckingsale
    """

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
    # https://developer.nvidia.com/cuda-gpus
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    cuda_arch_values = (
        '10', '11', '12', '13',
        '20', '21',
        '30', '32', '35', '37',
        '50', '52', '53',
        '60', '61', '62',
        '70', '72', '75',
        '80', '86'
    )

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
    # Spack has depends_on(cuda, when='cuda_arch!=None') or alike
    variant('cuda', default=False,
            description='Build with CUDA')

    variant('cuda_arch',
            description='CUDA architecture',
            values=spack.variant.any_combination_of(*cuda_arch_values),
            when='+cuda')

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
    # https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
    @staticmethod
    def cuda_flags(arch_list):
        return [('--generate-code arch=compute_{0},code=sm_{0} '
                 '--generate-code arch=compute_{0},code=compute_{0}').format(s)
                for s in arch_list]

    depends_on('cuda', when='+cuda')

    # CUDA version vs Architecture
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
    depends_on('cuda@:6.0',     when='cuda_arch=10')
    depends_on('cuda@:6.5',     when='cuda_arch=11')
    depends_on('[email protected]:6.5',  when='cuda_arch=12')
    depends_on('[email protected]:6.5',  when='cuda_arch=13')

    depends_on('[email protected]:8.0',  when='cuda_arch=20')
    depends_on('[email protected]:8.0',  when='cuda_arch=21')

    depends_on('[email protected]:10.2', when='cuda_arch=30')
    depends_on('[email protected]:10.2', when='cuda_arch=32')
    depends_on('[email protected]:',     when='cuda_arch=35')
    depends_on('[email protected]:',     when='cuda_arch=37')

    depends_on('[email protected]:',     when='cuda_arch=50')
    depends_on('[email protected]:',     when='cuda_arch=52')
    depends_on('[email protected]:',     when='cuda_arch=53')

    depends_on('[email protected]:',     when='cuda_arch=60')
    depends_on('[email protected]:',     when='cuda_arch=61')
    depends_on('[email protected]:',     when='cuda_arch=62')

    depends_on('[email protected]:',     when='cuda_arch=70')
    depends_on('[email protected]:',     when='cuda_arch=72')
    depends_on('[email protected]:',    when='cuda_arch=75')

    depends_on('[email protected]:',    when='cuda_arch=80')
    depends_on('[email protected]:',    when='cuda_arch=86')

    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
    # platform=linux, since they should also apply to platform=cray, and may
    # apply to platform=darwin. We currently do not provide conflicts for
    # platform=darwin with %apple-clang.

    # Linux x86_64 compiler conflicts from here:
    # https://gist.github.com/ax3l/9489132

    # GCC
    # According to
    # https://github.com/spack/spack/pull/25054#issuecomment-886531664
    # these conflicts are valid independently from the architecture

    # minimum supported versions
    conflicts('%gcc@:4', when='+cuda ^[email protected]:')
    conflicts('%gcc@:5', when='+cuda ^[email protected]:')

    # maximum supported version
    # NOTE:
    # in order to not constrain future cuda version to old gcc versions,
    # it has been decided to use an upper bound for the latest version.
    # This implies that the last one in the list has to be updated at
    # each release of a new cuda minor version.
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
    conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
    conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
    conflicts('%clang@14:', when='+cuda ^cuda@:11.6')

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
    conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
    conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
    conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
    conflicts('%pgi@:15.3,15.5:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
    conflicts('%pgi@:15,18:', when='+cuda ^[email protected]:9.1 target=x86_64:')
    conflicts('%pgi@:16,19:', when='+cuda ^[email protected]:10 target=x86_64:')
    conflicts('%pgi@:17,20:', when='+cuda ^[email protected]:10.2.89 target=x86_64:')
    conflicts('%pgi@:17,21:', when='+cuda ^[email protected]:11.1.0 target=x86_64:')
    conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
    conflicts('%clang@:3.7,4:', when='+cuda ^[email protected]:9.0 target=x86_64:')
    conflicts('%clang@:3.7,4.1:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%clang@:3.7,5.1:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%clang@:3.7,6.1:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%clang@:3.7,7.1:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%clang@:3.7,8.1:',
              when='+cuda ^[email protected]:10.1.243 target=x86_64:')
    conflicts('%clang@:3.2,9:', when='+cuda ^[email protected] target=x86_64:')
    conflicts('%clang@:5', when='+cuda ^[email protected]: target=x86_64:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')

    # x86_64 vs. ppc64le differ according to NVidia docs
    # Linux ppc64le compiler conflicts from Table from the docs below:
    # https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html

    # information prior to CUDA 9 difficult to find
    conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
    conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
    conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
    conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
    conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
    conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
    conflicts('%clang@7:', when='+cuda ^[email protected] target=ppc64le:')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
    conflicts('%clang@:5', when='+cuda ^[email protected]: target=ppc64le:')
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')

    # Intel is mostly relevant for x86_64 Linux, even though it also
    # exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
    conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
    conflicts('%intel@:12.0', when='+cuda ^[email protected]:')
    conflicts('%intel@:13.0', when='+cuda ^[email protected]:')
    conflicts('%intel@:13.2', when='+cuda ^[email protected]:')
    conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
    # Intel 15.x is compatible with CUDA 7 thru current CUDA
    conflicts('%[email protected]:', when='+cuda ^cuda@:8.0.43')
    conflicts('%[email protected]:', when='+cuda ^cuda@:8.0.60')
    conflicts('%[email protected]:', when='+cuda ^cuda@:9.9')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.0')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.1')
    conflicts('%[email protected]:', when='+cuda ^cuda@:11.1.0')

    # XL is mostly relevant for ppc64le Linux
    conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
    conflicts('%xl@:12,14:15,17:', when='+cuda ^[email protected]')
    conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')

    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
    conflicts('platform=darwin', when='+cuda ^[email protected]: ')

    # Make sure cuda_arch can not be used without +cuda
    for value in cuda_arch_values:
        conflicts('~cuda', when='cuda_arch=' + value)
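
As a usage sketch, the architectures selected in cuda_arch are typically fed back through cuda_flags; the package name, URL, and the NVCCFLAGS make variable below are hypothetical, and combining with MakefilePackage is just one possible choice of build system:

class HypotheticalCudaApp(MakefilePackage, CudaPackage):
    """Hypothetical CUDA application using the CudaPackage mix-in."""

    homepage = "https://example.com/cuda-app"
    url = "https://example.com/cuda-app-3.1.tar.gz"

    version('3.1')

    @property
    def build_targets(self):
        targets = []
        if '+cuda' in self.spec and not self.spec.satisfies('cuda_arch=none'):
            cuda_arch = self.spec.variants['cuda_arch'].value
            # e.g. '--generate-code arch=compute_70,code=sm_70 ...'
            targets.append('NVCCFLAGS=' + ' '.join(self.cuda_flags(cuda_arch)))
        return targets

A user might then install it with: spack install cuda-app +cuda cuda_arch=70,80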
Example #18
class CMakePackage(PackageBase):
    """Specialized class for packages built using CMake

    For more information on the CMake build system, see:
    https://cmake.org/cmake/help/latest/

    This class provides three phases that can be overridden:

        1. :py:meth:`~.CMakePackage.cmake`
        2. :py:meth:`~.CMakePackage.build`
        3. :py:meth:`~.CMakePackage.install`

    They all have sensible defaults and for many packages the only thing
    necessary will be to override :py:meth:`~.CMakePackage.cmake_args`.
    For a finer tuning you may also override:

        +-----------------------------------------------+--------------------+
        | **Method**                                    | **Purpose**        |
        +===============================================+====================+
        | :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the    |
        |                                               | root CMakeLists.txt|
        +-----------------------------------------------+--------------------+
        | :py:meth:`~.CMakePackage.build_directory`     | Directory where to |
        |                                               | build the package  |
        +-----------------------------------------------+--------------------+


    The generator used by CMake can be specified by providing the
    generator attribute. Per
    https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
    the format is: [<secondary-generator> - ]<primary_generator>. The
    full list of primary and secondary generators supported by CMake may
    be found in the documentation for the version of CMake used;
    however, at this time Spack supports only the primary generators
    "Unix Makefiles" and "Ninja." Spack's CMake support is agnostic with
    respect to primary generators. Spack will generate a runtime error
    if the generator string does not follow the prescribed format, or if
    the primary generator is not supported.
    """
    #: Phases of a CMake package
    phases = ['cmake', 'build', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'CMakePackage'

    build_targets = []  # type: List[str]
    install_targets = ['install']

    build_time_test_callbacks = ['check']

    #: The build system generator to use.
    #:
    #: See ``cmake --help`` for a list of valid generators.
    #: Currently, "Unix Makefiles" and "Ninja" are the only generators
    #: that Spack supports. Defaults to "Unix Makefiles".
    #:
    #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    #: for more information.
    generator = 'Unix Makefiles'

    # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
    variant('build_type', default='RelWithDebInfo',
            description='CMake build type',
            values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

    # https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
    variant('ipo', default=False,
            description='CMake interprocedural optimization')
    # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
    conflicts('+ipo', when='^cmake@:3.8',
              msg='+ipo is not supported by CMake < 3.9')

    depends_on('cmake', type='build')

    @property
    def archive_files(self):
        """Files to archive for packages based on CMake"""
        return [os.path.join(self.build_directory, 'CMakeCache.txt')]

    @property
    def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,
        not to the ``build_directory``. Defaults to the current directory.

        :return: directory containing CMakeLists.txt
        """
        return self.stage.source_path

    @property
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers

        :return: standard cmake arguments
        """
        # standard CMake arguments
        std_cmake_args = CMakePackage._std_args(self)
        std_cmake_args += getattr(self, 'cmake_flag_args', [])
        return std_cmake_args

    @staticmethod
    def _std_args(pkg):
        """Computes the standard cmake arguments for a generic package"""
        try:
            generator = pkg.generator
        except AttributeError:
            generator = 'Unix Makefiles'

        # Make sure a valid generator was chosen
        valid_primary_generators = ['Unix Makefiles', 'Ninja']
        primary_generator = _extract_primary_generator(generator)
        if primary_generator not in valid_primary_generators:
            msg  = "Invalid CMake generator: '{0}'\n".format(generator)
            msg += "CMakePackage currently supports the following "
            msg += "primary generators: '{0}'".\
                   format("', '".join(valid_primary_generators))
            raise InstallError(msg)

        try:
            build_type = pkg.spec.variants['build_type'].value
        except KeyError:
            build_type = 'RelWithDebInfo'

        try:
            ipo = pkg.spec.variants['ipo'].value
        except KeyError:
            ipo = False

        define = CMakePackage.define
        args = [
            '-G', generator,
            define('CMAKE_INSTALL_PREFIX', pkg.prefix),
            define('CMAKE_BUILD_TYPE', build_type),
        ]

        # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
        if pkg.spec.satisfies('^[email protected]:'):
            args.append(define('CMAKE_INTERPROCEDURAL_OPTIMIZATION', ipo))

        if primary_generator == 'Unix Makefiles':
            args.append(define('CMAKE_VERBOSE_MAKEFILE', True))

        if platform.mac_ver()[0]:
            args.extend([
                define('CMAKE_FIND_FRAMEWORK', "LAST"),
                define('CMAKE_FIND_APPBUNDLE', "LAST"),
            ])

        # Set up CMake rpath
        args.extend([
            define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
            define('CMAKE_INSTALL_RPATH',
                   spack.build_environment.get_rpaths(pkg)),
        ])
        # CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake
        # to find immediate link dependencies in right places:
        deps = [d.prefix for d in
                pkg.spec.dependencies(deptype=('build', 'link'))]
        deps = filter_system_paths(deps)
        args.append(define('CMAKE_PREFIX_PATH', deps))
        return args

    @staticmethod
    def define(cmake_var, value):
        """Return a CMake command line argument that defines a variable.

        The resulting argument will convert boolean values to OFF/ON
        and lists/tuples to CMake semicolon-separated string lists. All other
        values will be interpreted as strings.

        Examples:

            .. code-block:: python

                [define('BUILD_SHARED_LIBS', True),
                 define('CMAKE_CXX_STANDARD', 14),
                 define('swr', ['avx', 'avx2'])]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]

        """
        # Convert the value to a CMake-friendly representation: booleans
        # become ON/OFF, lists and tuples become semicolon-separated strings
        if isinstance(value, bool):
            kind = 'BOOL'
            value = "ON" if value else "OFF"
        else:
            kind = 'STRING'
            if isinstance(value, (list, tuple)):
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)

        return "".join(["-D", cmake_var, ":", kind, "=", value])

    def define_from_variant(self, cmake_var, variant=None):
        """Return a CMake command line argument from the given variant's value.

        The optional ``variant`` argument defaults to the lower-case transform
        of ``cmake_var``.

        This utility function is similar to
        :py:meth:`~.AutotoolsPackage.with_or_without`.

        Examples:

            Given a package with:

            .. code-block:: python

                variant('cxxstd', default='11', values=('11', '14'),
                        multi=False, description='')
                variant('shared', default=True, description='')
                variant('swr', values=any_combination_of('avx', 'avx2'),
                        description='')

            calling this function like:

            .. code-block:: python

                [define_from_variant('BUILD_SHARED_LIBS', 'shared'),
                 define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
                 define_from_variant('SWR')]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2]

            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
        """

        if variant is None:
            variant = cmake_var.lower()

        if variant not in self.variants:
            raise KeyError(
                '"{0}" is not a variant of "{1}"'.format(variant, self.name))

        value = self.spec.variants[variant].value
        if isinstance(value, (tuple, list)):
            # Sort multi-valued variants for reproducibility
            value = sorted(value)

        return self.define(cmake_var, value)

    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass the specified
        compiler flags to cmake. Note CMAKE does not have a cppflags option,
        so cppflags will be added to cflags, cxxflags, and fflags to mimic the
        behavior in other tools."""
        # Has to be dynamic attribute due to caching
        setattr(self, 'cmake_flag_args', [])

        flag_string = '-DCMAKE_{0}_FLAGS={1}'
        langs = {'C': 'c', 'CXX': 'cxx', 'Fortran': 'f'}

        # Handle language compiler flags
        for lang, pre in langs.items():
            flag = pre + 'flags'
            # cmake has no explicit cppflags support -> add it to all langs
            lang_flags = ' '.join(flags.get(flag, []) + flags.get('cppflags',
                                                                  []))
            if lang_flags:
                self.cmake_flag_args.append(flag_string.format(lang,
                                                               lang_flags))

        # Cmake has different linker arguments for different build types.
        # We specify for each of them.
        if flags['ldflags']:
            ldflags = ' '.join(flags['ldflags'])
            ld_string = '-DCMAKE_{0}_LINKER_FLAGS={1}'
            # cmake has separate linker arguments for types of builds.
            for type in ['EXE', 'MODULE', 'SHARED', 'STATIC']:
                self.cmake_flag_args.append(ld_string.format(type, ldflags))

        # CMake has libs options separated by language. Apply ours to each.
        if flags['ldlibs']:
            libs_flags = ' '.join(flags['ldlibs'])
            libs_string = '-DCMAKE_{0}_STANDARD_LIBRARIES={1}'
            for lang in langs:
                self.cmake_flag_args.append(libs_string.format(lang,
                                                               libs_flags))

    @property
    def build_dirname(self):
        """Returns the directory name to use when building the package

        :return: name of the subdirectory for building the package
        """
        return 'spack-build-%s' % self.spec.dag_hash(7)

    @property
    def build_directory(self):
        """Returns the directory to use when building the package

        :return: directory where to build the package
        """
        return os.path.join(self.stage.path, self.build_dirname)

    def cmake_args(self):
        """Produces a list containing all the arguments that must be passed to
        cmake, except:

            * CMAKE_INSTALL_PREFIX
            * CMAKE_BUILD_TYPE

        which will be set automatically.

        :return: list of arguments for cmake
        """
        return []

    def cmake(self, spec, prefix):
        """Runs ``cmake`` in the build directory"""
        options = self.std_cmake_args
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).cmake(*options)

    def build(self, spec, prefix):
        """Make the build targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.build_targets)
            elif self.generator == 'Ninja':
                self.build_targets.append("-v")
                inspect.getmodule(self).ninja(*self.build_targets)

    def install(self, spec, prefix):
        """Make the install targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.install_targets)
            elif self.generator == 'Ninja':
                inspect.getmodule(self).ninja(*self.install_targets)

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def check(self):
        """Searches the CMake-generated Makefile for the target ``test``
        and runs it if found.
        """
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                self._if_make_target_execute('test',
                                             jobs_env='CTEST_PARALLEL_LEVEL')
                self._if_make_target_execute('check')
            elif self.generator == 'Ninja':
                self._if_ninja_target_execute('test',
                                              jobs_env='CTEST_PARALLEL_LEVEL')
                self._if_ninja_target_execute('check')

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
Example No. 19
0
class WafPackage(PackageBase):
    """Specialized class for packages that are built using the
    Waf build system. See https://waf.io/book/ for more information.

    This class provides the following phases that can be overridden:

    * configure
    * build
    * install

    These are all standard Waf commands and can be found by running:

    .. code-block:: console

       $ python waf --help

    Each phase provides a function <phase> that runs:

    .. code-block:: console

       $ python waf -j<jobs> <phase>

    where <jobs> is the number of parallel jobs to build with. Each phase
    also has a <phase_args> function that can pass arguments to this call.
    All of these functions return an empty list by default; the
    ``configure`` phase additionally passes
    ``--prefix=/path/to/installation/prefix``.
    """
    # Default phases
    phases = ['configure', 'build', 'install']

    # To be used in UI queries that require to know which
    # build-system class we are using
    build_system_class = 'WafPackage'

    # Callback names for build-time test
    build_time_test_callbacks = ['build_test']

    # Callback names for install-time test
    install_time_test_callbacks = ['install_test']

    # Much like AutotoolsPackage does not require automake and autoconf
    # to build, WafPackage does not require waf to build. It only requires
    # python to run the waf build script.
    depends_on('[email protected]:', type='build')

    @property
    def build_directory(self):
        """The directory containing the ``waf`` file."""
        return self.stage.source_path

    def python(self, *args, **kwargs):
        """The python ``Executable``."""
        inspect.getmodule(self).python(*args, **kwargs)

    def waf(self, *args, **kwargs):
        """Runs the waf ``Executable``."""
        jobs = inspect.getmodule(self).make_jobs

        with working_dir(self.build_directory):
            self.python('waf', '-j{0}'.format(jobs), *args, **kwargs)

    def configure(self, spec, prefix):
        """Configures the project."""
        args = ['--prefix={0}'.format(self.prefix)]
        args += self.configure_args()

        self.waf('configure', *args)

    def configure_args(self):
        """Arguments to pass to configure."""
        return []

    def build(self, spec, prefix):
        """Executes the build."""
        args = self.build_args()

        self.waf('build', *args)

    def build_args(self):
        """Arguments to pass to build."""
        return []

    def install(self, spec, prefix):
        """Installs the targets on the system."""
        args = self.install_args()

        self.waf('install', *args)

    def install_args(self):
        """Arguments to pass to install."""
        return []

    # Testing

    def build_test(self):
        """Run unit tests after build.

        By default, does nothing. Override this if you want to
        add package-specific tests.
        """
        pass

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def install_test(self):
        """Run unit tests after install.

        By default, does nothing. Override this if you want to
        add package-specific tests.
        """
        pass

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
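
As a hedged illustration of how ``WafPackage`` is typically consumed, the
sketch below defines a hypothetical package (its name, URL, checksum and
variant are invented) that only overrides ``configure_args``; the
``configure``, ``build`` and ``install`` phases are inherited unchanged.

class ExampleWaf(WafPackage):
    """Hypothetical project built with the Waf build system."""

    homepage = "https://example.org/example-waf"
    url = "https://example.org/example-waf-1.0.tar.gz"

    # Placeholder checksum for the sketch only
    version('1.0', sha256='0' * 64)

    variant('tests', default=False, description='Build the test suite')

    def configure_args(self):
        # Extra options appended after --prefix=... when running
        # `python waf configure`
        args = []
        if '+tests' in self.spec:
            args.append('--enable-tests')
        return args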
Example No. 20
0
class Oasis(MakefilePackage):
    """The OASIS coupler is a software allowing synchronized exchanges
    of coupling information between numerical codes representing
    different components of the climate system."""

    homepage = "https://portal.enes.org/oasis"
    git = 'https://gitlab.com/cerfacs/oasis3-mct.git'
    maintainers = ['leclairm']

    version('master', branch='OASIS3-MCT_4.0')
    version('dev-build', branch='OASIS3-MCT_4.0')

    depends_on('mpi', type=('build', 'link', 'run'))
    depends_on('netcdf-fortran', type=('build', 'link', 'run'))

    build_directory = 'util/make_dir'

    makefile_file = 'TopMakefileOasis3'

    # Relative path where the built libraries are stored (corresponds
    # to the absolute path called ARCHDIR in the Makefile)
    rel_ARCHDIR = 'spack-build'

    def setup_build_environment(self, env):

        CHAN = 'MPI1'
        env.set('CHAN', CHAN)
        env.set('F90', self.spec['mpi'].mpifc)
        env.set('f90', self.spec['mpi'].mpifc)
        env.set('F', self.spec['mpi'].mpifc)
        env.set('f', self.spec['mpi'].mpifc)
        env.set('MAKE', 'gmake')

        LIBBUILD = os.path.join('../..', self.rel_ARCHDIR, 'build/lib')
        INCPSMILE = '-I{LIBBUILD}/psmile.{CHAN} -I{LIBBUILD}/mct -I{LIBBUILD}/scrip'.format(
            LIBBUILD=LIBBUILD, CHAN=CHAN)

        CPPDEF = '-Duse_comm_{CHAN} -D__VERBOSE -DTREAT_OVERLAY -D__NO_16BYTE_REALS'.format(
            CHAN=CHAN)
        env.set('CPPDEF', CPPDEF)

        FFLAGS = '-O2 {INCPSMILE} {CPPDEF}'.format(CPPDEF=CPPDEF,
                                                   INCPSMILE=INCPSMILE)
        env.set('F90FLAGS', FFLAGS)
        env.set('f90FLAGS', FFLAGS)
        env.set('FFLAGS', FFLAGS)
        env.set('fFLAGS', FFLAGS)
        env.set('CCFLAGS', FFLAGS)

    def edit(self, spec, prefix):

        COUPLE = os.getcwd()
        ARCHDIR = os.path.join(COUPLE, self.rel_ARCHDIR)
        with working_dir(self.build_directory):
            makefile = FileFilter(self.makefile_file)
            makefile.filter(
                'include make.inc',
                'export COUPLE = {}\nexport ARCHDIR = {}'.format(
                    COUPLE, ARCHDIR))
            makefile.filter(r'\$\(modifmakefile\)\s\;\s', '')

    def patch(self):

        # Remove old directives for Fujitsu compilers. Already fixed in
        # MCT [1] but not yet merged into OASIS.
        # [1] https://github.com/MCSclimate/MCT/commit/dcb4fa4527bbc51729fb67fbc2e0179bfcb4baa2
        with working_dir('lib/mct/mct'):
            m_AttrVect = FileFilter('m_AttrVect.F90')
            m_AttrVect.filter(r'\s*\!DIR\$ COLLAPSE', '')

    def build(self, spec, prefix):

        with working_dir(self.build_directory):
            make('-f', self.makefile_file)

    def install(self, spec, prefix):

        with working_dir(os.path.join(self.rel_ARCHDIR, 'lib')):
            os.symlink('libmct.a', 'libmct_oasis.a')
            os.symlink('libmpeu.a', 'libmpeu_oasis.a')

        install_tree(self.rel_ARCHDIR, prefix)
Example No. 21
0
class CudaPackage(PackageBase):
    """Auxiliary class which contains CUDA variant, dependencies and conflicts
    and is meant to unify and facilitate its usage.

    Maintainers: ax3l, Rombur
    """

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
    # https://developer.nvidia.com/cuda-gpus
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    cuda_arch_values = ('10', '11', '12', '13', '20', '21', '30', '32', '35',
                        '37', '50', '52', '53', '60', '61', '62', '70', '72',
                        '75', '80', '86')

    # FIXME: keep cuda and cuda_arch separate to make usage easier until
    # Spack has depends_on(cuda, when='cuda_arch!=None') or alike
    variant('cuda', default=False, description='Build with CUDA')

    variant('cuda_arch',
            description='CUDA architecture',
            values=spack.variant.any_combination_of(*cuda_arch_values))

    # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
    # https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
    @staticmethod
    def cuda_flags(arch_list):
        return [('--generate-code arch=compute_{0},code=sm_{0} '
                 '--generate-code arch=compute_{0},code=compute_{0}').format(s)
                for s in arch_list]

    depends_on('cuda', when='+cuda')

    # CUDA version vs Architecture
    # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
    # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
    depends_on('cuda@:6.0', when='cuda_arch=10')
    depends_on('cuda@:6.5', when='cuda_arch=11')
    depends_on('[email protected]:6.5', when='cuda_arch=12')
    depends_on('[email protected]:6.5', when='cuda_arch=13')

    depends_on('[email protected]:8.0', when='cuda_arch=20')
    depends_on('[email protected]:8.0', when='cuda_arch=21')

    depends_on('[email protected]:10.2', when='cuda_arch=30')
    depends_on('[email protected]:10.2', when='cuda_arch=32')
    depends_on('[email protected]:', when='cuda_arch=35')
    depends_on('[email protected]:', when='cuda_arch=37')

    depends_on('[email protected]:', when='cuda_arch=50')
    depends_on('[email protected]:', when='cuda_arch=52')
    depends_on('[email protected]:', when='cuda_arch=53')

    depends_on('[email protected]:', when='cuda_arch=60')
    depends_on('[email protected]:', when='cuda_arch=61')
    depends_on('[email protected]:', when='cuda_arch=62')

    depends_on('[email protected]:', when='cuda_arch=70')
    depends_on('[email protected]:', when='cuda_arch=72')
    depends_on('[email protected]:', when='cuda_arch=75')

    depends_on('[email protected]:', when='cuda_arch=80')
    depends_on('[email protected]:', when='cuda_arch=86')

    # There are at least three cases to be aware of for compiler conflicts
    # 1. Linux x86_64
    # 2. Linux ppc64le
    # 3. Mac OS X
    # CUDA-compiler conflicts are version-to-version specific and are
    # difficult to express with the current Spack conflict syntax

    # Linux x86_64 compiler conflicts from here:
    # https://gist.github.com/ax3l/9489132
    arch_platform = ' target=x86_64: platform=linux'
    conflicts('%gcc@5:', when='+cuda ^cuda@:7.5' + arch_platform)
    conflicts('%gcc@6:', when='+cuda ^cuda@:8' + arch_platform)
    conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform)
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89' + arch_platform)
    conflicts('%gcc@:4', when='+cuda ^[email protected]:' + arch_platform)
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0' + arch_platform)
    conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform)
    conflicts('%pgi@:15.3,15.5:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform)
    conflicts('%pgi@:15,18:', when='+cuda ^[email protected]:9.1' + arch_platform)
    conflicts('%pgi@:16,19:', when='+cuda ^[email protected]:10' + arch_platform)
    conflicts('%pgi@:17,20:',
              when='+cuda ^[email protected]:10.2.89' + arch_platform)
    conflicts('%pgi@:17,21:', when='+cuda ^[email protected]:11.1.0' + arch_platform)
    conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5' + arch_platform)
    conflicts('%clang@:3.7,4:', when='+cuda ^[email protected]:9.0' + arch_platform)
    conflicts('%clang@:3.7,4.1:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%clang@:3.7,5.1:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%clang@:3.7,6.1:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%clang@:3.7,7.1:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%clang@:3.7,8.1:',
              when='+cuda ^[email protected]:10.1.243' + arch_platform)
    conflicts('%clang@:3.2,9:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%clang@:5', when='+cuda ^[email protected]:' + arch_platform)
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0' + arch_platform)

    # x86_64 vs. ppc64le differ according to NVidia docs
    # Linux ppc64le compiler conflicts from the tables in the docs below:
    # https://docs.nvidia.com/cuda/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.2/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.1/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/9.0/cuda-installation-guide-linux/index.html
    # https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html

    arch_platform = ' target=ppc64le: platform=linux'
    # information prior to CUDA 9 is difficult to find
    conflicts('%gcc@6:', when='+cuda ^cuda@:9' + arch_platform)
    conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
    conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform)
    # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
    conflicts('%gcc@:4', when='+cuda ^[email protected]:' + arch_platform)
    conflicts('%gcc@10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
    conflicts('%gcc@11:', when='+cuda ^cuda@:11.1.0' + arch_platform)
    conflicts('%pgi', when='+cuda ^cuda@:8' + arch_platform)
    conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185' + arch_platform)
    conflicts('%pgi@:17', when='+cuda ^cuda@:10' + arch_platform)
    conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176' + arch_platform)
    conflicts('%clang@5:', when='+cuda ^cuda@:9.1' + arch_platform)
    conflicts('%clang@6:', when='+cuda ^cuda@:9.2' + arch_platform)
    conflicts('%clang@7:', when='+cuda ^[email protected]' + arch_platform)
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.1.105' + arch_platform)
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.2.89' + arch_platform)
    conflicts('%clang@:5', when='+cuda ^[email protected]:' + arch_platform)
    conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2' + arch_platform)
    conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0' + arch_platform)

    # Intel is mostly relevant for x86_64 Linux, even though it also
    # exists for Mac OS X. No information is available prior to CUDA 3.2
    # or Intel 11.1.
    conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
    conflicts('%intel@:12.0', when='+cuda ^[email protected]:')
    conflicts('%intel@:13.0', when='+cuda ^[email protected]:')
    conflicts('%intel@:13.2', when='+cuda ^[email protected]:')
    conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
    # Intel 15.x is compatible with CUDA 7 thru current CUDA
    conflicts('%[email protected]:', when='+cuda ^cuda@:8.0.43')
    conflicts('%[email protected]:', when='+cuda ^cuda@:8.0.60')
    conflicts('%[email protected]:', when='+cuda ^cuda@:9.9')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.0')
    conflicts('%[email protected]:', when='+cuda ^cuda@:10.1')
    conflicts('%[email protected]:', when='+cuda ^cuda@:11.1.0')

    # XL is mostly relevant for ppc64le Linux
    conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
    conflicts('%xl@:12,14:15,17:', when='+cuda ^[email protected]')
    conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')

    # Mac OS X
    # platform = ' platform=darwin'
    # Apple Xcode clang vs. LLVM clang are difficult to distinguish
    # with Spack syntax. The Xcode clang name is `[email protected]`,
    # which precludes specifying version ranges. We have proposed
    # renaming Xcode clang to `[email protected]` or even
    # `[email protected]` as a possible fix.
    # Compiler conflicts will eventually be taken from here:
    # https://docs.nvidia.com/cuda/cuda-installation-guide-mac-os-x/index.html#abstract
    conflicts('platform=darwin', when='+cuda ^[email protected]:')

    # Make sure cuda_arch can not be used without +cuda
    for value in cuda_arch_values:
        conflicts('~cuda', when='cuda_arch=' + value)
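
As a hedged sketch (hypothetical package name, URL and checksum), this is the
usual way the ``CudaPackage`` mix-in is consumed: a build-system base class is
combined with ``CudaPackage``, and ``cuda_flags`` turns the ``cuda_arch``
variant into compiler options.

class ExampleCudaApp(CMakePackage, CudaPackage):
    """Hypothetical CUDA-enabled package using the CudaPackage mix-in."""

    homepage = "https://example.org/cuda-app"
    url = "https://example.org/cuda-app-1.0.tar.gz"

    # Placeholder checksum for the sketch only
    version('1.0', sha256='0' * 64)

    def cmake_args(self):
        args = []
        if '+cuda' in self.spec:
            # cuda_arch is a multi-valued variant; its value is a tuple
            # of strings such as ('70', '80')
            cuda_arch = self.spec.variants['cuda_arch'].value
            if 'none' not in cuda_arch:
                args.append('-DCMAKE_CUDA_FLAGS={0}'.format(
                    ' '.join(self.cuda_flags(cuda_arch))))
        return args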
Example No. 22
0
class MesonPackage(PackageBase):
    """Specialized class for packages built using Meson

    For more information on the Meson build system, see:
    https://mesonbuild.com/

    This class provides three phases that can be overridden:

        1. :py:meth:`~.MesonPackage.meson`
        2. :py:meth:`~.MesonPackage.build`
        3. :py:meth:`~.MesonPackage.install`

    They all have sensible defaults and for many packages the only thing
    necessary will be to override :py:meth:`~.MesonPackage.meson_args`.
    For a finer tuning you may also override:

        +-----------------------------------------------+--------------------+
        | **Method**                                    | **Purpose**        |
        +===============================================+====================+
        | :py:meth:`~.MesonPackage.root_mesonlists_dir` | Location of the    |
        |                                               | root meson.build   |
        +-----------------------------------------------+--------------------+
        | :py:meth:`~.MesonPackage.build_directory`     | Directory where to |
        |                                               | build the package  |
        +-----------------------------------------------+--------------------+


    """
    #: Phases of a Meson package
    phases = ['meson', 'build', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'MesonPackage'

    build_targets = []  # type: List[str]
    install_targets = ['install']

    build_time_test_callbacks = ['check']

    variant('buildtype',
            default='debugoptimized',
            description='Meson build type',
            values=('plain', 'debug', 'debugoptimized', 'release', 'minsize'))
    variant('default_library',
            default='shared',
            description='Default library type',
            values=('shared', 'static', 'both'))
    variant('strip', default=False, description='Strip targets on install')

    depends_on('meson', type='build')
    depends_on('ninja', type='build')

    @property
    def archive_files(self):
        """Files to archive for packages based on Meson"""
        return [os.path.join(self.build_directory, 'meson-logs/meson-log.txt')]

    @property
    def root_mesonlists_dir(self):
        """The relative path to the directory containing meson.build

        This path is relative to the root of the extracted tarball,
        not to the ``build_directory``. Defaults to the current directory.

        :return: directory containing meson.build
        """
        return self.stage.source_path

    @property
    def std_meson_args(self):
        """Standard meson arguments provided as a property for
        convenience of package writers

        :return: standard meson arguments
        """
        # standard Meson arguments
        std_meson_args = MesonPackage._std_args(self)
        std_meson_args += getattr(self, 'meson_flag_args', [])
        return std_meson_args

    @staticmethod
    def _std_args(pkg):
        """Computes the standard meson arguments for a generic package"""

        try:
            build_type = pkg.spec.variants['buildtype'].value
        except KeyError:
            build_type = 'release'

        strip = 'true' if '+strip' in pkg.spec else 'false'

        try:
            default_library = pkg.spec.variants['default_library'].value
        except KeyError:
            default_library = 'shared'

        args = [
            '--prefix={0}'.format(pkg.prefix),
            # If we do not specify libdir explicitly, Meson chooses something
            # like lib/x86_64-linux-gnu, which causes problems when trying to
            # find libraries and pkg-config files.
            # See https://github.com/mesonbuild/meson/issues/2197
            '--libdir={0}'.format(pkg.prefix.lib),
            '-Dbuildtype={0}'.format(build_type),
            '-Dstrip={0}'.format(strip),
            '-Ddefault_library={0}'.format(default_library)
        ]

        return args

    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass the specified
        compiler flags to meson."""
        # Has to be dynamic attribute due to caching
        setattr(self, 'meson_flag_args', [])

    @property
    def build_directory(self):
        """Returns the directory to use when building the package

        :return: directory where to build the package
        """
        return os.path.join(self.stage.source_path, 'spack-build')

    def meson_args(self):
        """Produces a list containing all the arguments that must be passed to
        meson, except:

        * ``--prefix``
        * ``--libdir``
        * ``--buildtype``
        * ``--strip``
        * ``--default_library``

        which will be set automatically.

        :return: list of arguments for meson
        """
        return []

    def meson(self, spec, prefix):
        """Runs ``meson`` in the build directory"""
        options = [os.path.abspath(self.root_mesonlists_dir)]
        options += self.std_meson_args
        options += self.meson_args()
        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).meson(*options)

    def build(self, spec, prefix):
        """Make the build targets"""
        options = ['-v']
        options += self.build_targets
        with working_dir(self.build_directory):
            inspect.getmodule(self).ninja(*options)

    def install(self, spec, prefix):
        """Make the install targets"""
        with working_dir(self.build_directory):
            inspect.getmodule(self).ninja(*self.install_targets)

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def check(self):
        """Searches the Meson-generated file for the target ``test``
        and runs it if found.
        """
        with working_dir(self.build_directory):
            self._if_ninja_target_execute('test')
            self._if_ninja_target_execute('check')

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
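
As a hedged sketch (hypothetical package name, URL, checksum and option), a
package based on ``MesonPackage`` typically only overrides ``meson_args`` to
add project-specific ``-D`` options; prefix, libdir, buildtype, strip and
default_library are already handled by ``std_meson_args``.

class ExampleMesonApp(MesonPackage):
    """Hypothetical project built with Meson."""

    homepage = "https://example.org/meson-app"
    url = "https://example.org/meson-app-1.0.tar.gz"

    # Placeholder checksum for the sketch only
    version('1.0', sha256='0' * 64)

    variant('docs', default=False, description='Build documentation')

    def meson_args(self):
        # Only project-specific options; the standard ones come from
        # std_meson_args
        return [
            '-Ddocs={0}'.format('true' if '+docs' in self.spec else 'false'),
        ]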
Example No. 23
0
class CMakePackage(PackageBase):
    """Specialized class for packages built using CMake

    For more information on the CMake build system, see:
    https://cmake.org/cmake/help/latest/

    This class provides three phases that can be overridden:

        1. :py:meth:`~.CMakePackage.cmake`
        2. :py:meth:`~.CMakePackage.build`
        3. :py:meth:`~.CMakePackage.install`

    They all have sensible defaults and for many packages the only thing
    necessary will be to override :py:meth:`~.CMakePackage.cmake_args`.
    For a finer tuning you may also override:

        +-----------------------------------------------+--------------------+
        | **Method**                                    | **Purpose**        |
        +===============================================+====================+
        | :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the    |
        |                                               | root CMakeLists.txt|
        +-----------------------------------------------+--------------------+
        | :py:meth:`~.CMakePackage.build_directory`     | Directory where to |
        |                                               | build the package  |
        +-----------------------------------------------+--------------------+


    """
    #: Phases of a CMake package
    phases = ['cmake', 'build', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'CMakePackage'

    build_targets = []
    install_targets = ['install']

    build_time_test_callbacks = ['check']

    #: The build system generator to use.
    #:
    #: See ``cmake --help`` for a list of valid generators.
    #: Currently, "Unix Makefiles" and "Ninja" are the only generators
    #: that Spack supports. Defaults to "Unix Makefiles".
    #:
    #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
    #: for more information.
    generator = 'Unix Makefiles'

    # https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
    variant('build_type',
            default='RelWithDebInfo',
            description='CMake build type',
            values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

    depends_on('cmake', type='build')

    @property
    def archive_files(self):
        """Files to archive for packages based on CMake"""
        return [os.path.join(self.build_directory, 'CMakeCache.txt')]

    @property
    def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,
        not to the ``build_directory``. Defaults to the current directory.

        :return: directory containing CMakeLists.txt
        """
        return self.stage.source_path

    @property
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers

        :return: standard cmake arguments
        """
        # standard CMake arguments
        std_cmake_args = CMakePackage._std_args(self)
        std_cmake_args += getattr(self, 'cmake_flag_args', [])
        return std_cmake_args

    @staticmethod
    def _std_args(pkg):
        """Computes the standard cmake arguments for a generic package"""
        try:
            generator = pkg.generator
        except AttributeError:
            generator = 'Unix Makefiles'

        # Make sure a valid generator was chosen
        valid_generators = ['Unix Makefiles', 'Ninja']
        if generator not in valid_generators:
            msg = "Invalid CMake generator: '{0}'\n".format(generator)
            msg += "CMakePackage currently supports the following "
            msg += "generators: '{0}'".format("', '".join(valid_generators))
            raise InstallError(msg)

        try:
            build_type = pkg.spec.variants['build_type'].value
        except KeyError:
            build_type = 'RelWithDebInfo'

        args = [
            '-G', generator,
            '-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix),
            '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type),
            '-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON'
        ]

        if platform.mac_ver()[0]:
            args.extend([
                '-DCMAKE_FIND_FRAMEWORK:STRING=LAST',
                '-DCMAKE_FIND_APPBUNDLE:STRING=LAST'
            ])

        # Set up CMake rpath
        args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE')
        rpaths = ';'.join(spack.build_environment.get_rpaths(pkg))
        args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
        # CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake
        # to find immediate link dependencies in right places:
        deps = [
            d.prefix for d in pkg.spec.dependencies(deptype=('build', 'link'))
        ]
        deps = filter_system_paths(deps)
        args.append('-DCMAKE_PREFIX_PATH:STRING={0}'.format(';'.join(deps)))
        return args

    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass the specified
        compiler flags to cmake. Note CMAKE does not have a cppflags option,
        so cppflags will be added to cflags, cxxflags, and fflags to mimic the
        behavior in other tools."""
        # Has to be dynamic attribute due to caching
        setattr(self, 'cmake_flag_args', [])

        flag_string = '-DCMAKE_{0}_FLAGS={1}'
        langs = {'C': 'c', 'CXX': 'cxx', 'Fortran': 'f'}

        # Handle language compiler flags
        for lang, pre in langs.items():
            flag = pre + 'flags'
            # cmake has no explicit cppflags support -> add it to all langs
            lang_flags = ' '.join(
                flags.get(flag, []) + flags.get('cppflags', []))
            if lang_flags:
                self.cmake_flag_args.append(
                    flag_string.format(lang, lang_flags))

        # CMake uses different linker flags for each target type
        # (executable, module, shared and static library), so apply the
        # ldflags to all of them.
        if flags['ldflags']:
            ldflags = ' '.join(flags['ldflags'])
            ld_string = '-DCMAKE_{0}_LINKER_FLAGS={1}'
            for ld_type in ['EXE', 'MODULE', 'SHARED', 'STATIC']:
                self.cmake_flag_args.append(ld_string.format(ld_type, ldflags))

        # CMake has libs options separated by language. Apply ours to each.
        if flags['ldlibs']:
            libs_flags = ' '.join(flags['ldlibs'])
            libs_string = '-DCMAKE_{0}_STANDARD_LIBRARIES={1}'
            for lang in langs:
                self.cmake_flag_args.append(
                    libs_string.format(lang, libs_flags))

    @property
    def build_directory(self):
        """Returns the directory to use when building the package

        :return: directory where to build the package
        """
        return os.path.join(self.stage.source_path, 'spack-build')

    def cmake_args(self):
        """Produces a list containing all the arguments that must be passed to
        cmake, except:

            * CMAKE_INSTALL_PREFIX
            * CMAKE_BUILD_TYPE

        which will be set automatically.

        :return: list of arguments for cmake
        """
        return []

    def cmake(self, spec, prefix):
        """Runs ``cmake`` in the build directory"""
        options = [os.path.abspath(self.root_cmakelists_dir)]
        options += self.std_cmake_args
        options += self.cmake_args()
        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).cmake(*options)

    def build(self, spec, prefix):
        """Make the build targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.build_targets)
            elif self.generator == 'Ninja':
                inspect.getmodule(self).ninja(*self.build_targets)

    def install(self, spec, prefix):
        """Make the install targets"""
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                inspect.getmodule(self).make(*self.install_targets)
            elif self.generator == 'Ninja':
                inspect.getmodule(self).ninja(*self.install_targets)

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def check(self):
        """Searches the CMake-generated Makefile for the target ``test``
        and runs it if found.
        """
        with working_dir(self.build_directory):
            if self.generator == 'Unix Makefiles':
                self._if_make_target_execute('test')
                self._if_make_target_execute('check')
            elif self.generator == 'Ninja':
                self._if_ninja_target_execute('test')
                self._if_ninja_target_execute('check')

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
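
As a hedged sketch (hypothetical package name, URL, checksum and options), a
``CMakePackage`` subclass usually just overrides ``cmake_args``; setting
``generator = 'Ninja'`` switches the build and install phases from ``make``
to ``ninja``, in which case a build dependency on ``ninja`` must be declared.

class ExampleCMakeApp(CMakePackage):
    """Hypothetical project built with CMake and the Ninja generator."""

    homepage = "https://example.org/cmake-app"
    url = "https://example.org/cmake-app-1.0.tar.gz"

    # Placeholder checksum for the sketch only
    version('1.0', sha256='0' * 64)

    variant('shared', default=True, description='Build shared libraries')

    depends_on('zlib')
    depends_on('ninja', type='build')

    generator = 'Ninja'

    def cmake_args(self):
        # CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE are added automatically
        return [
            '-DBUILD_SHARED_LIBS:BOOL={0}'.format(
                'ON' if '+shared' in self.spec else 'OFF'),
            '-DZLIB_ROOT={0}'.format(self.spec['zlib'].prefix),
        ]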
Example No. 24
0
File: cmake.py Project: zepx/spack
class CMakePackage(PackageBase):
    """Specialized class for packages that are built using CMake

    This class provides three phases that can be overridden:

    * cmake
    * build
    * install

    They all have sensible defaults and for many packages the only thing
    necessary will be to override ``cmake_args``

    Additionally, you may specify make targets for build and install
    phases by overriding ``build_targets`` and ``install_targets``
    """
    phases = ['cmake', 'build', 'install']
    # To be used in UI queries that require to know which
    # build-system class we are using
    build_system_class = 'CMakePackage'

    build_targets = []
    install_targets = ['install']

    depends_on('cmake', type='build')

    def build_type(self):
        """Override to provide the correct build_type in case a complex
        logic is needed
        """
        return 'RelWithDebInfo'

    def root_cmakelists_dir(self):
        """Directory where to find the root CMakeLists.txt"""
        return self.stage.source_path

    @property
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """
        # standard CMake arguments
        return CMakePackage._std_args(self)

    @staticmethod
    def _std_args(pkg):
        """Computes the standard cmake arguments for a generic package"""
        try:
            build_type = pkg.build_type()
        except AttributeError:
            build_type = 'RelWithDebInfo'

        args = [
            '-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix),
            '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type),
            '-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON'
        ]
        if platform.mac_ver()[0]:
            args.append('-DCMAKE_FIND_FRAMEWORK:STRING=LAST')

        # Set up CMake rpath
        args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE')
        rpaths = ':'.join(spack.build_environment.get_rpaths(pkg))
        args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
        return args

    def build_directory(self):
        """Override to provide another place to build the package"""
        return join_path(self.stage.source_path, 'spack-build')

    def cmake_args(self):
        """Method to be overridden. Should return an iterable containing
        all the arguments that must be passed to configure, except:

        * CMAKE_INSTALL_PREFIX
        * CMAKE_BUILD_TYPE
        """
        return []

    def cmake(self, spec, prefix):
        """Run cmake in the build directory"""
        options = [self.root_cmakelists_dir()] + self.std_cmake_args + \
            self.cmake_args()
        with working_dir(self.build_directory(), create=True):
            inspect.getmodule(self).cmake(*options)

    def build(self, spec, prefix):
        """Make the build targets"""
        with working_dir(self.build_directory()):
            inspect.getmodule(self).make(*self.build_targets)

    def install(self, spec, prefix):
        """Make the install targets"""
        with working_dir(self.build_directory()):
            inspect.getmodule(self).make(*self.install_targets)

    @PackageBase.sanity_check('build')
    @PackageBase.on_package_attributes(run_tests=True)
    def _run_default_function(self):
        """This function is run after build if ``self.run_tests == True``

        It will search for a method named ``check`` and run it. A sensible
        default is provided in the base class.
        """
        try:
            fn = getattr(self, 'check')
            tty.msg('Trying default build sanity checks [check]')
            fn()
        except AttributeError:
            tty.msg(
                'Skipping default build sanity checks [method `check` not implemented]'
            )  # NOQA: ignore=E501

    def check(self):
        """Default test: search the Makefile for the target ``test``
        and run it if found.
        """
        with working_dir(self.build_directory()):
            self._if_make_target_execute('test')

    # Check that self.prefix is there after installation
    PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
Example No. 25
0
class Genie(Package):  # Genie doesn't use Autotools
    """Genie is a neutrino Monte Carlo Generator."""

    homepage = "https://www.genie-mc.org"
    url = "https://github.com/GENIE-MC/Generator/archive/R-3_00_06.tar.gz"
    git = "https://github.com/GENIE-MC/Generator.git"

    tags = ["neutrino", "hep"]

    maintainers = [
        # maintainer of this recipe, not affiliated with the GENIE collaboration
        "davehadley",
    ]

    version("master", branch="master")
    version("3.0.6",
            sha256=
            "ab56ea85d0c1d09029254365bfe75a1427effa717389753b9e0c1b6c2eaa5eaf")
    version("3.0.4",
            sha256=
            "53f034618fef9f7f0e17d1c4ed72743e4bba590e824b795177a1a8a8486c861e")
    version("3.0.2",
            sha256=
            "34d6c37017b2387c781aea7bc727a0aac0ef45d6b3f3982cc6f3fc82493f65c3")
    version("3.0.0",
            sha256=
            "3953c7d9f1f832dd32dfbc0b9260be59431206c204aec6ab0aa68c01176f2ae6")
    version("2.12.10",
            sha256=
            "c8762db3dcc490f80f8a61268f5b964d4d35b80134b622e89fe2307a836f2a0b")
    version("2.12.8",
            sha256=
            "7ca169a8d9eda7267d28b76b2f3110552852f8eeae263a03cd5139caacebb4ea")
    version("2.12.6",
            sha256=
            "3b450c609875459798ec98e12cf671cc971cbb13345af6d75bd6278d422f3309")
    version("2.12.4",
            sha256=
            "19a4a1633b0847a9f16a44e0c74b9c224ca3bb93975aecf108603c22e807517b")
    version("2.12.2",
            sha256=
            "cbdc45a739878940dadcaaed575b5cad6b5e7035f29605045b1ca557e6faa6d1")
    version("2.12.0",
            sha256=
            "d2b01c80f38d269cb0296b3f2932798ef3f1d51bd130e81274fbfeeb381fac6b")
    version("2.11.2",
            sha256=
            "0f4c25d8ceb7513553671643c9cdac5aa98c40fc8594a5ecb25c077c6b36166e")
    version("2.11.0",
            sha256=
            "1ebe0eb65d797595413632f1cec1cb2621cb8e8d0384a2843799724a79b1d80c")
    version("2.10.10",
            sha256=
            "1dfaadcf1bbaf6e164b612f410c4399301e63497ad6a4891706b1787ac11a7a1")
    version("2.10.8",
            sha256=
            "4f6f5af2062e7c505b76e70547ac2ae304a9790c3e9b9592818d8aebeebc8398")
    version("2.10.6",
            sha256=
            "d00b4288c886f81459fb2967e539f30315d4385f82d1d3f4330298d313f9a176")
    version("2.10.4",
            sha256=
            "df909bf7e1a789ca01794995687da2af803769f0823273a4a3a31678d6d5b0f1")
    version("2.10.2",
            sha256=
            "6abe4e0cdb5e8f5beddf0ccdbebc94c175a9f72592b1cbbffe01b88ee3972bf9")
    version("2.10.0",
            sha256=
            "17bda900c996b6f4f10a7f6a3be94e56c3b8dcdeb2ef8865ca7f20c5fe725291")
    version("2.9.0",
            sha256=
            "8229beb73f65f5af86a77bf141acfbe4a8b68cba9d797aae083a929906f6f2a2")
    version("2.8.6",
            sha256=
            "310dc8e0d17a65e6b9773e398250703a3a6f94ceafe94f599ae0f7b3fecf7e6c")

    depends_on("root+pythia6")
    depends_on("pythia6")
    depends_on("lhapdf", when="@3:")
    depends_on("lhapdf5", when="@:2")
    depends_on("log4cpp")
    depends_on("libxml2")
    depends_on("gsl")

    # GENIE does not actually require cmake, but root does.
    # Spack's concretizer fails with "unsatisfiable constraint" if we don't add this.
    depends_on("cmake@3:")

    # GENIE's Makefiles think that the Spack compiler is invalid.
    # Disable this check.
    patch("genie_disable_gopt_with_compiler_check.patch",
          level=0,
          when="@2.11:")

    # Flags for GENIE"s optional but disabled by default features
    variant(
        "atmo",
        default=False,
        description="Enable GENIE Atmospheric neutrino event generation app")
    variant(
        "fnal",
        default=False,
        description="Enables FNAL experiment-specific event generation app")
    variant("nucleondecay",
            default=False,
            description="Enable GENIE Nucleon decay event generation app")
    variant("masterclass",
            default=False,
            description="Enable GENIE neutrino masterclass app")
    variant("t2k",
            default=False,
            description="Enable T2K-specific generation app")
    variant(
        "vleextension",
        default=False,
        description="Enable GENIE very low energy (1 MeV - 100 MeV) extension")

    phases = ["configure", "build", "install"]

    def url_for_version(self, version):
        url = "https://github.com/GENIE-MC/Generator/archive/R-{0}.tar.gz"
        if version >= Version(3):
            return url.format("{0}_{1:02d}_{2:02d}".format(*version))
        else:
            return url.format(version.underscored)
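
    # For illustration: with this scheme version 3.0.6 maps to
    # ".../archive/R-3_00_06.tar.gz" and version 2.12.10 maps to
    # ".../archive/R-2_12_10.tar.gz".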

    def setup_build_environment(self, env):
        env.set("GENIE", self.stage.source_path)
        return super(Genie, self).setup_build_environment(env)

    def setup_run_environment(self, env):
        env.set("GENIE", self.prefix)
        return super(Genie, self).setup_run_environment(env)

    def configure(self, spec, prefix):
        configure = Executable("./configure")
        args = self._configure_args(spec, prefix)
        configure(*args)

    def build(self, spec, prefix):
        # parallel build is not supported on GENIE 2
        self._make(parallel=spec.satisfies("@3:"))

    def install(self, spec, prefix):
        # GENIE make install does not support parallel jobs
        self._make("install", parallel=False)
        # GENIE requires these files to be present at runtime, but doesn't
        # install them, so we must install them ourselves.
        # install_tree function is injected into scope by spack build_environment.py
        install_tree("config", os.sep.join((prefix, "config")))
        install_tree("data", os.sep.join((prefix, "data")))

    def _configure_args(self, spec, prefix):
        args = [
            "--prefix=" + prefix,
            "--with-compiler=" + os.environ["CC"],
            "--with-libxml2-inc={0}{1}libxml2".format(
                spec["libxml2"].prefix.include, os.sep),
            "--with-libxml2-lib=" + spec["libxml2"].prefix.lib,
            "--with-log4cpp-inc=" + spec["log4cpp"].prefix.include,
            "--with-log4cpp-lib=" + spec["log4cpp"].prefix.lib,
            "--with-pythia6-lib=" + spec["pythia6"].prefix.lib,
        ]
        if self.spec.satisfies("@:2"):
            args += [
                "--enable-lhapdf",
                "--with-lhapdf-inc=" + spec["lhapdf5"].prefix.include,
                "--with-lhapdf-lib=" + spec["lhapdf5"].prefix.lib,
                # must be enabled or some GENIE 2 versions fail to link
                # this option was removed in GENIE 3
                "--enable-rwght",
            ]
        else:
            args += [
                "--enable-lhapdf6",
                "--with-lhapdf6-inc=" + spec["lhapdf"].prefix.include,
                "--with-lhapdf6-lib=" + spec["lhapdf"].prefix.lib,
            ]
        if "+vleextension" in self.spec:
            args += ["--enable-vle-extension"]
        if "+t2k" in self.spec:
            args += ["--enable-t2k"]
        if "+fnal" in self.spec:
            args += ["--enable-fnal"]
        if "+atmo" in self.spec:
            args += ["--enable-atmo"]
        if "+nucleondecay" in self.spec:
            args += ["--enable-nucleon-decay"]
        if "+masterclass" in self.spec:
            args += ["--enable-masterclass"]
        return args

    def _make(self, *args, **kwargs):
        parallel = kwargs.get("parallel", False)
        args = list(self._make_args) + list(args)
        # make function is injected into scope by spack build_environment.py
        make(*args, parallel=parallel)

    @property
    def _make_args(self):
        return [
            "CC=c++",
            "CXX=c++",
            "LD=c++",
        ]
Example No. 26
0
class PerlPackage(PackageBase):
    """Specialized class for packages that are built using Perl.

    This class provides four phases that can be overridden if required:

        1. :py:meth:`~.PerlPackage.configure`
        2. :py:meth:`~.PerlPackage.build`
        3. :py:meth:`~.PerlPackage.check`
        4. :py:meth:`~.PerlPackage.install`

    The default methods use, in order of preference:
        (1) Makefile.PL,
        (2) Build.PL.

    Some packages may need to override
    :py:meth:`~.PerlPackage.configure_args`,
    which produces a list of arguments for
    :py:meth:`~.PerlPackage.configure`.
    Arguments should not include the installation base directory.
    """
    #: Phases of a Perl package
    phases = ['configure', 'build', 'install']

    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'PerlPackage'

    #: Callback names for build-time test
    build_time_test_callbacks = ['check']

    extends('perl')

    depends_on('perl', type=('build', 'run'))

    def configure_args(self):
        """Produces a list containing the arguments that must be passed to
        :py:meth:`~.PerlPackage.configure`. Arguments should not include
        the installation base directory, which is prepended automatically.

        :return: list of arguments for Makefile.PL or Build.PL
        """
        return []

    def configure(self, spec, prefix):
        """Runs Makefile.PL or Build.PL with arguments consisting of
        an appropriate installation base directory followed by the
        list returned by :py:meth:`~.PerlPackage.configure_args`.

        :raise RuntimeError: if neither Makefile.PL nor Build.PL exists
        """
        if os.path.isfile('Makefile.PL'):
            self.build_method = 'Makefile.PL'
            self.build_executable = inspect.getmodule(self).make
        elif os.path.isfile('Build.PL'):
            self.build_method = 'Build.PL'
            self.build_executable = Executable(
                join_path(self.stage.source_path, 'Build'))
        else:
            raise RuntimeError('Unknown build_method for perl package')

        if self.build_method == 'Makefile.PL':
            options = ['Makefile.PL', 'INSTALL_BASE={0}'.format(prefix)]
        elif self.build_method == 'Build.PL':
            options = ['Build.PL', '--install_base', prefix]
        options += self.configure_args()

        inspect.getmodule(self).perl(*options)

    def build(self, spec, prefix):
        """Builds a Perl package."""
        self.build_executable()

    # Ensure that tests run after build (if requested):
    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def check(self):
        """Runs built-in tests of a Perl package."""
        self.build_executable('test')

    def install(self, spec, prefix):
        """Installs a Perl package."""
        self.build_executable('install')

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)
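
As a hedged sketch (hypothetical distribution name, URL, checksum and
arguments), a ``PerlPackage`` subclass usually only overrides
``configure_args``; whether Makefile.PL or Build.PL is used is detected
automatically and the installation base directory is prepended for you.

class PerlExampleParser(PerlPackage):
    """Hypothetical CPAN distribution built through PerlPackage."""

    homepage = "https://metacpan.org/release/Example-Parser"
    url = "https://example.org/Example-Parser-1.00.tar.gz"

    # Placeholder checksum for the sketch only
    version('1.00', sha256='0' * 64)

    depends_on('expat')

    def configure_args(self):
        # Extra Makefile.PL/Build.PL arguments; INSTALL_BASE (or
        # --install_base) is added automatically by configure()
        return [
            'EXPATLIBPATH={0}'.format(self.spec['expat'].prefix.lib),
            'EXPATINCPATH={0}'.format(self.spec['expat'].prefix.include),
        ]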
Example No. 27
0
class AutotoolsPackage(PackageBase):
    """Specialized class for packages built using GNU Autotools.

    This class provides four phases that can be overridden:

        1. :py:meth:`~.AutotoolsPackage.autoreconf`
        2. :py:meth:`~.AutotoolsPackage.configure`
        3. :py:meth:`~.AutotoolsPackage.build`
        4. :py:meth:`~.AutotoolsPackage.install`

    They all have sensible defaults and for many packages the only thing
    necessary will be to override the helper method
    :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
    For a finer tuning you may also override:

        +-----------------------------------------------+--------------------+
        | **Method**                                    | **Purpose**        |
        +===============================================+====================+
        | :py:attr:`~.AutotoolsPackage.build_targets`   | Specify ``make``   |
        |                                               | targets for the    |
        |                                               | build phase        |
        +-----------------------------------------------+--------------------+
        | :py:attr:`~.AutotoolsPackage.install_targets` | Specify ``make``   |
        |                                               | targets for the    |
        |                                               | install phase      |
        +-----------------------------------------------+--------------------+
        | :py:meth:`~.AutotoolsPackage.check`           | Run  build time    |
        |                                               | tests if required  |
        +-----------------------------------------------+--------------------+

    """
    #: Phases of a GNU Autotools package
    phases = ['autoreconf', 'configure', 'build', 'install']
    #: This attribute is used in UI queries that need to know the build
    #: system base class
    build_system_class = 'AutotoolsPackage'

    @property
    def patch_config_files(self):
        """
        Whether or not to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball. This currently only applies to
        ``ppc64le:``, ``aarch64:``, and ``riscv64:`` target architectures. The
        substitutes are taken from the ``gnuconfig`` package, which is
        automatically added as a build dependency for these architectures. In
        case system versions of these config files are required, the
        ``gnuconfig`` package can be marked external with a prefix pointing to
        the directory containing the system ``config.guess`` and ``config.sub``
        files.
        """
        return (self.spec.satisfies('target=ppc64le:')
                or self.spec.satisfies('target=aarch64:')
                or self.spec.satisfies('target=riscv64:'))

    #: Whether or not to update ``libtool``
    #: (currently only for Arm/Clang/Fujitsu compilers)
    patch_libtool = True

    #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
    #: phase
    build_targets = []  # type: List[str]
    #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install`
    #: phase
    install_targets = ['install']

    #: Callback names for build-time test
    build_time_test_callbacks = ['check']

    #: Callback names for install-time test
    install_time_test_callbacks = ['installcheck']

    #: Set to true to force the autoreconf step even if configure is present
    force_autoreconf = False
    #: Options to be passed to autoreconf when using the default implementation
    autoreconf_extra_args = []  # type: List[str]

    #: If False, delete all the .la files in the prefix folder after
    #: installation; if True, install them instead.
    install_libtool_archives = False

    depends_on('gnuconfig', type='build', when='target=ppc64le:')
    depends_on('gnuconfig', type='build', when='target=aarch64:')
    depends_on('gnuconfig', type='build', when='target=riscv64:')

    @property
    def _removed_la_files_log(self):
        """File containing the list of remove libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):
            build_dir = os.path.join(self.stage.path, build_dir)
        return os.path.join(build_dir, 'removed_la_files.txt')

    @property
    def archive_files(self):
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, 'config.log')]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files

    @run_after('autoreconf')
    def _do_patch_config_files(self):
        """Some packages ship with older config.guess/config.sub files and
        need to have these updated when installed on a newer architecture.
        In particular, config.guess fails for PPC64LE for versions prior
        to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and
        RISC-V (riscv64).
        """
        if not self.patch_config_files:
            return

        # TODO: Expand this to select the 'config.sub'-compatible architecture
        # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
        # does accept 'ppc64le').
        if self.spec.satisfies('target=ppc64le:'):
            config_arch = 'ppc64le'
        elif self.spec.satisfies('target=aarch64:'):
            config_arch = 'aarch64'
        elif self.spec.satisfies('target=riscv64:'):
            config_arch = 'riscv64'
        else:
            config_arch = 'local'

        def runs_ok(script_abs_path):
            # Construct the list of arguments for the call
            additional_args = {'config.sub': [config_arch]}
            script_name = os.path.basename(script_abs_path)
            args = [script_abs_path] + additional_args.get(script_name, [])

            try:
                check_call(args, stdout=PIPE, stderr=PIPE)
            except Exception as e:
                tty.debug(e)
                return False

            return True

        # Get the list of files that needs to be patched
        to_be_patched = fs.find(self.stage.path,
                                files=['config.sub', 'config.guess'])
        to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

        # If there are no files to be patched, return early
        if not to_be_patched:
            return

        # Otherwise, require `gnuconfig` to be a build dependency
        self._require_build_deps(pkgs=['gnuconfig'],
                                 spec=self.spec,
                                 err="Cannot patch config files")

        # Get the config files we need to patch (config.sub / config.guess).
        to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
        gnuconfig = self.spec['gnuconfig']
        gnuconfig_dir = gnuconfig.prefix

        # An external gnuconfig may not have a prefix.
        if gnuconfig_dir is None:
            raise InstallError(
                "Spack could not find substitutes for GNU config "
                "files because no prefix is available for the "
                "`gnuconfig` package. Make sure you set a prefix "
                "path instead of modules for external `gnuconfig`.")

        candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)

        # For external packages the user may have specified an incorrect
        # prefix; otherwise the installation is just corrupt.
        if not candidates:
            msg = ("Spack could not find `config.guess` and `config.sub` "
                   "files in the `gnuconfig` prefix `{0}`. This means the "
                   "`gnuconfig` package is broken").format(gnuconfig_dir)
            if gnuconfig.external:
                msg += (
                    " or the `gnuconfig` package prefix is misconfigured as"
                    " an external package")
            raise InstallError(msg)

        # Filter working substitutes
        candidates = [f for f in candidates if runs_ok(f)]
        substitutes = {}
        for candidate in candidates:
            config_file = os.path.basename(candidate)
            substitutes[config_file] = candidate
            to_be_found.remove(config_file)

        # Check that we found everything we needed
        if to_be_found:
            msg = """\
Spack could not find working replacements for the following autotools config
files: {0}.

To resolve this problem, please try the following:
1. Try to rebuild with `patch_config_files = False` in the package `{1}`, to
   rule out that Spack tries to replace config files not used by the build.
2. Verify that the `gnuconfig` package is up-to-date.
3. On some systems you need to use system-provided `config.guess` and `config.sub`
   files. In this case, mark `gnuconfig` as a non-buildable external package,
   and set the prefix to the directory containing the `config.guess` and
   `config.sub` files.
"""
            raise InstallError(msg.format(', '.join(to_be_found), self.name))

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:
            name = os.path.basename(abs_path)
            mode = os.stat(abs_path).st_mode
            os.chmod(abs_path, stat.S_IWUSR)
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)

    @run_before('configure')
    def _set_autotools_environment_variables(self):
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

        We set this to 1 and expect the user to take responsibility if
        they are running as root. They have to anyway, as this variable
        doesn't actually prevent configure from doing bad things as root.
        Without it, configure just fails halfway through, but it can
        still run things *before* this check. Forcing this just removes a
        nuisance -- this is not circumventing any real protection.

        """
        os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

    @run_after('configure')
    def _do_patch_libtool(self):
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        flags for the Arm, Clang/Flang, and Fujitsu compilers."""

        # Exit early if we are required not to patch libtool
        if not self.patch_libtool:
            return

        for libtool_path in fs.find(self.build_directory,
                                    'libtool',
                                    recursive=True):
            self._patch_libtool(libtool_path)

    def _patch_libtool(self, libtool_path):
        if self.spec.satisfies('%arm')\
                or self.spec.satisfies('%clang')\
                or self.spec.satisfies('%fj'):
            fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
            fs.filter_file(
                'pic_flag=""\n',
                'pic_flag="{0}"\n'.format(self.compiler.cc_pic_flag),
                libtool_path)
        if self.spec.satisfies('%fj'):
            fs.filter_file('-nostdlib', '', libtool_path)
            rehead = r'/\S*/'
            objfile = [
                'fjhpctag.o', 'fjcrt0.o', 'fjlang08.o', 'fjomp.o', 'crti.o',
                'crtbeginS.o', 'crtendS.o'
            ]
            for o in objfile:
                fs.filter_file(rehead + o, '', libtool_path)

    @property
    def configure_directory(self):
        """Returns the directory where 'configure' resides.

        :return: directory where configure can be found
        """
        return self.stage.source_path

    @property
    def configure_abs_path(self):
        # Absolute path to configure
        configure_abs_path = os.path.join(
            os.path.abspath(self.configure_directory), 'configure')
        return configure_abs_path

    @property
    def build_directory(self):
        """Override to provide another place to build the package"""
        return self.configure_directory

    @run_before('autoreconf')
    def delete_configure_to_force_update(self):
        if self.force_autoreconf:
            force_remove(self.configure_abs_path)

    def _require_build_deps(self, pkgs, spec, err):
        """Require `pkgs` to be direct build dependencies of `spec`. Raises a
        RuntimeError with a helpful error message when any dep is missing."""

        build_deps = [d.name for d in spec.dependencies(deptype='build')]
        missing_deps = [x for x in pkgs if x not in build_deps]

        if not missing_deps:
            return

        # Raise an exception on missing deps.
        msg = ("{0}: missing dependencies: {1}.\n\nPlease add "
               "the following lines to the package:\n\n".format(
                   err, ", ".join(missing_deps)))

        for dep in missing_deps:
            msg += (
                "    depends_on('{0}', type='build', when='@{1}')\n".format(
                    dep, spec.version))

        msg += "\nUpdate the version (when='@{0}') as needed.".format(
            spec.version)
        raise RuntimeError(msg)

    def autoreconf(self, spec, prefix):
        """Not needed usually, configure should be already there"""

        # If configure exists nothing needs to be done
        if os.path.exists(self.configure_abs_path):
            return

        # Else try to regenerate it, which requires a few build dependencies
        self._require_build_deps(pkgs=['autoconf', 'automake', 'libtool'],
                                 spec=spec,
                                 err="Cannot generate configure")

        tty.msg('Configure script not found: trying to generate it')
        tty.warn('*********************************************************')
        tty.warn('* If the default procedure fails, consider implementing *')
        tty.warn('*        a custom AUTORECONF phase in the package       *')
        tty.warn('*********************************************************')
        with working_dir(self.configure_directory):
            m = inspect.getmodule(self)
            # This line is what is needed most of the time
            # --install, --verbose, --force
            autoreconf_args = ['-ivf']
            autoreconf_args += self.autoreconf_search_path_args
            autoreconf_args += self.autoreconf_extra_args
            m.autoreconf(*autoreconf_args)

    @property
    def autoreconf_search_path_args(self):
        """Arguments to autoreconf to modify the search paths"""
        search_path_args = []
        for dep in self.spec.dependencies(deptype='build'):
            if os.path.exists(dep.prefix.share.aclocal):
                search_path_args.extend(['-I', dep.prefix.share.aclocal])
        return search_path_args

    @run_after('autoreconf')
    def set_configure_or_die(self):
        """Checks the presence of a ``configure`` file after the
        autoreconf phase. If it is found, sets a module attribute
        appropriately; otherwise raises an error.

        :raises RuntimeError: if a configure script is not found in
            :py:meth:`~AutotoolsPackage.configure_directory`
        """
        # Check if a configure script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = 'configure script not found in {0}'
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        inspect.getmodule(self).configure = Executable(self.configure_abs_path)

    def configure_args(self):
        """Produces a list containing all the arguments that must be passed to
        configure, except ``--prefix`` which will be pre-pended to the list.

        :return: list of arguments for configure
        """
        return []

    def flags_to_build_system_args(self, flags):
        """Produces a list of all command line arguments to pass specified
        compiler flags to configure."""
        # Has to be a dynamic attribute due to caching.
        setattr(self, 'configure_flag_args', [])
        for flag, values in flags.items():
            if values:
                values_str = '{0}={1}'.format(flag.upper(), ' '.join(values))
                self.configure_flag_args.append(values_str)
        # Spack's fflags are meant for both F77 and FC, therefore we
        # additionally set FCFLAGS if required.
        values = flags.get('fflags', None)
        if values:
            values_str = 'FCFLAGS={0}'.format(' '.join(values))
            self.configure_flag_args.append(values_str)

    def configure(self, spec, prefix):
        """Runs configure with the arguments specified in
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
        and an appropriately set prefix.
        """
        options = getattr(self, 'configure_flag_args', [])
        options += ['--prefix={0}'.format(prefix)]
        options += self.configure_args()

        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).configure(*options)

    def build(self, spec, prefix):
        """Makes the build targets specified by
        :py:attr:`~.AutotoolsPackage.build_targets`
        """
        # See https://autotools.io/automake/silent.html
        params = ['V=1']
        params += self.build_targets
        with working_dir(self.build_directory):
            inspect.getmodule(self).make(*params)

    def install(self, spec, prefix):
        """Makes the install targets specified by
        :py:attr:`~.AutotoolsPackage.install_targets`
        """
        with working_dir(self.build_directory):
            inspect.getmodule(self).make(*self.install_targets)

    run_after('build')(PackageBase._run_default_build_time_test_callbacks)

    def check(self):
        """Searches the Makefile for targets ``test`` and ``check``
        and runs them if found.
        """
        with working_dir(self.build_directory):
            self._if_make_target_execute('test')
            self._if_make_target_execute('check')

    def _activate_or_not(self,
                         name,
                         activation_word,
                         deactivation_word,
                         activation_value=None,
                         variant=None):
        """This function contains the current implementation details of
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.

        Args:
            name (str): name of the option that is being activated or not
            activation_word (str): the default activation word ('with' in the
                case of ``with_or_without``)
            deactivation_word (str): the default deactivation word ('without'
                in the case of ``with_or_without``)
            activation_value (typing.Callable): callable that accepts a single
                value. This value is either one of the allowed values for a
                multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.

                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
            variant (str): name of the variant that is being processed
                           (if different from option name)

        Examples:

            Given a package with:

            .. code-block:: python

                variant('foo', values=('x', 'y'), description='')
                variant('bar', default=True, description='')
                variant('ba_z', default=True, description='')

            calling this function like:

            .. code-block:: python

                _activate_or_not(
                    'foo', 'with', 'without', activation_value='prefix'
                )
                _activate_or_not('bar', 'with', 'without')
                _activate_or_not('ba-z', 'with', 'without', variant='ba_z')

            will generate the following configuration options:

            .. code-block:: console

                --with-x=<prefix-to-x> --without-y --with-bar --with-ba-z

            for ``<spec-name> foo=x +bar``

        Note: returns an empty list when the variant is conditional and its condition
              is not met.

        Returns:
            list: list of strings that corresponds to the activation/deactivation
            of the variant that has been processed

        Raises:
            KeyError: if name is not among known variants
        """
        spec = self.spec
        args = []

        if activation_value == 'prefix':
            activation_value = lambda x: spec[x].prefix

        variant = variant or name

        # Defensively check that the name passed as an argument is among
        # the known variants
        if variant not in self.variants:
            msg = '"{0}" is not a variant of "{1}"'
            raise KeyError(msg.format(variant, self.name))

        if variant not in spec.variants:
            return []

        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        variant_desc, _ = self.variants[variant]
        if set(variant_desc.values) == set((True, False)):
            # BoolValuedVariant carry information about a single option.
            # Nonetheless, for uniformity of treatment we'll package them
            # in an iterable of one element.
            condition = '+{name}'.format(name=variant)
            options = [(name, condition in spec)]
        else:
            condition = '{variant}={value}'
            # "feature_values" is used to track values which correspond to
            # features which can be enabled or disabled as understood by the
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(variant_desc.values, 'feature_values',
                                     None) or variant_desc.values

            options = [(value, condition.format(variant=variant, value=value)
                        in spec) for value in feature_values]

        # For each allowed value in the list of values
        for option_value, activated in options:
            # Search for an override in the package for this value
            override_name = '{0}_or_{1}_{2}'.format(activation_word,
                                                    deactivation_word,
                                                    option_value)
            line_generator = getattr(self, override_name, None)
            # If not available use a sensible default
            if line_generator is None:

                def _default_generator(is_activated):
                    if is_activated:
                        line = '--{0}-{1}'.format(activation_word,
                                                  option_value)
                        if activation_value is not None and activation_value(
                                option_value):  # noqa: E501
                            line += '={0}'.format(
                                activation_value(option_value))
                        return line
                    return '--{0}-{1}'.format(deactivation_word, option_value)

                line_generator = _default_generator
            args.append(line_generator(activated))
        return args

    def with_or_without(self, name, activation_value=None, variant=None):
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.

        This function works on all types of variants. For bool-valued variants
        it will return by default ``--with-{name}`` or ``--without-{name}``.
        For other kinds of variants it will cycle over the allowed values and
        return either ``--with-{value}`` or ``--without-{value}``.

        If activation_value is given, then for each possible value of the
        variant, the option ``--with-{value}=activation_value(value)`` or
        ``--without-{value}`` will be added depending on whether or not
        ``variant=value`` is in the spec.

        Args:
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): callable that accepts a single
                value and returns the parameter to be used leading to an entry
                of the type ``--with-{name}={parameter}``.

                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
            list of arguments to configure
        """
        return self._activate_or_not(name, 'with', 'without', activation_value,
                                     variant)

    def enable_or_disable(self, name, activation_value=None, variant=None):
        """Same as
        :meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

        Args:
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): if present accepts a single value
                and returns the parameter to be used leading to an entry of the
                type ``--enable-{name}={parameter}``

                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
            list of arguments to configure
        """
        return self._activate_or_not(name, 'enable', 'disable',
                                     activation_value, variant)

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    def installcheck(self):
        """Searches the Makefile for an ``installcheck`` target
        and runs it if found.
        """
        with working_dir(self.build_directory):
            self._if_make_target_execute('installcheck')

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    @run_after('install')
    def remove_libtool_archives(self):
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
        # If .la files are to be installed there's nothing to do
        if self.install_libtool_archives:
            return

        # Remove the files and create a log of what was removed
        libtool_files = fs.find(str(self.prefix), '*.la', recursive=True)
        with fs.safe_remove(*libtool_files):
            fs.mkdirp(os.path.dirname(self._removed_la_files_log))
            with open(self._removed_la_files_log, mode='w') as f:
                f.write('\n'.join(libtool_files))

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    run_after('install')(PackageBase.apply_macos_rpath_fixups)
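
A minimal usage sketch (not part of the listing above): a hypothetical Autotools-based package wiring its variants into configure through configure_args(), enable_or_disable() and with_or_without(). The package name, variants and zlib dependency are invented for illustration.

class Libfoo(AutotoolsPackage):
    """Hypothetical package, shown only to illustrate the helpers above."""

    variant('shared', default=True, description='Build shared libraries')
    variant('zlib', default=False, description='Enable zlib compression support')

    depends_on('zlib', when='+zlib')

    def configure_args(self):
        args = []
        # Expands to --enable-shared or --disable-shared depending on the spec.
        args += self.enable_or_disable('shared')
        # Expands to --with-zlib=<zlib prefix> or --without-zlib.
        args += self.with_or_without('zlib', activation_value='prefix')
        return args
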
Example #28
File: sip.py Project: wangvsa/spack
class SIPPackage(PackageBase):
    """Specialized class for packages that are built using the
    SIP build system. See https://www.riverbankcomputing.com/software/sip/intro
    for more information.

    This class provides the following phases that can be overridden:

    * configure
    * build
    * install

    The configure phase already adds a set of default flags. To see more
    options, run ``python configure.py --help``.
    """
    # Default phases
    phases = ['configure', 'build', 'install']

    # To be used in UI queries that need to know which
    # build-system class we are using
    build_system_class = 'SIPPackage'

    #: Name of private sip module to install alongside package
    sip_module = 'sip'

    #: Callback names for install-time test
    install_time_test_callbacks = ['test']

    extends('python')

    depends_on('qt')
    depends_on('py-sip')

    @property
    def import_modules(self):
        """Names of modules that the Python package provides.

        These are used to test whether or not the installation succeeded.
        These names generally come from running:

        .. code-block:: python

           >>> import setuptools
           >>> setuptools.find_packages()

        in the source tarball directory. If the module names are incorrectly
        detected, this property can be overridden by the package.

        Returns:
            list: list of strings of module names
        """
        modules = []
        root = os.path.join(
            self.prefix,
            self.spec['python'].package.config_vars['python_lib']['false']
            ['false'],
        )

        # Some Python libraries are packages: collections of modules
        # distributed in directories containing __init__.py files
        for path in find(root, '__init__.py', recursive=True):
            modules.append(
                path.replace(root + os.sep, '',
                             1).replace(os.sep + '__init__.py',
                                        '').replace('/', '.'))

        # Some Python libraries are modules: individual *.py files
        # found in the site-packages directory
        for path in find(root, '*.py', recursive=False):
            modules.append(
                path.replace(root + os.sep, '',
                             1).replace('.py', '').replace('/', '.'))

        tty.debug('Detected the following modules: {0}'.format(modules))

        return modules

    def python(self, *args, **kwargs):
        """The python ``Executable``."""
        inspect.getmodule(self).python(*args, **kwargs)

    def configure_file(self):
        """Returns the name of the configure file to use."""
        return 'configure.py'

    def configure(self, spec, prefix):
        """Configure the package."""
        configure = self.configure_file()

        args = self.configure_args()

        python_include_dir = 'python' + str(spec['python'].version.up_to(2))

        args.extend([
            '--verbose',
            '--confirm-license',
            '--qmake',
            spec['qt'].prefix.bin.qmake,
            '--sip',
            spec['py-sip'].prefix.bin.sip,
            '--sip-incdir',
            join_path(spec['py-sip'].prefix.include, python_include_dir),
            '--bindir',
            prefix.bin,
            '--destdir',
            inspect.getmodule(self).site_packages_dir,
        ])

        self.python(configure, *args)

    def configure_args(self):
        """Arguments to pass to configure."""
        return []

    def build(self, spec, prefix):
        """Build the package."""
        args = self.build_args()

        inspect.getmodule(self).make(*args)

    def build_args(self):
        """Arguments to pass to build."""
        return []

    def install(self, spec, prefix):
        """Install the package."""
        args = self.install_args()

        inspect.getmodule(self).make('install', parallel=False, *args)

    def install_args(self):
        """Arguments to pass to install."""
        return []

    # Testing

    def test(self):
        """Attempts to import modules of the installed package."""

        # Make sure we are importing the installed modules,
        # not the ones in the source directory
        for module in self.import_modules:
            self.run_test(inspect.getmodule(self).python.path,
                          ['-c', 'import {0}'.format(module)],
                          purpose='checking import of {0}'.format(module),
                          work_dir='spack-test')

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    @run_after('install')
    def extend_path_setup(self):
        # See github issue #14121 and PR #15297
        module = self.spec['py-sip'].variants['module'].value
        if module != 'sip':
            module = module.split('.')[0]
            with working_dir(inspect.getmodule(self).site_packages_dir):
                with open(os.path.join(module, '__init__.py'), 'a') as f:
                    f.write('from pkgutil import extend_path\n')
                    f.write('__path__ = extend_path(__path__, __name__)\n')
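
For orientation, a rough sketch of a package consuming this class; the project name, URL and checksum below are placeholders, not a real distribution.

class PyFooWidgets(SIPPackage):
    """Hypothetical SIP-based Qt binding, shown only to illustrate the phases."""

    homepage = 'https://example.com/foo-widgets'
    url = 'https://example.com/foo-widgets-1.0.tar.gz'

    # Placeholder checksum; a real package would pin the actual sha256.
    version('1.0', sha256='0' * 64)

    def configure_args(self):
        # Appended to the default flags that SIPPackage.configure() builds;
        # the flag below is purely illustrative.
        return ['--debug']
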
Example #29
class PythonPackage(PackageBase):
    """Specialized class for packages that are built using pip."""
    #: Package name, version, and extension on PyPI
    pypi = None

    maintainers = ['adamjstewart']

    # Default phases
    phases = ['install']

    # To be used in UI queries that need to know which
    # build-system class we are using
    build_system_class = 'PythonPackage'

    #: Callback names for install-time test
    install_time_test_callbacks = ['test']

    extends('python')
    depends_on('py-pip', type='build')
    # FIXME: technically wheel is only needed when building from source, not when
    # installing a downloaded wheel, but I don't want to add wheel as a dep to every
    # package manually
    depends_on('py-wheel', type='build')

    py_namespace = None

    @staticmethod
    def _std_args(cls):
        return [
            # Verbose
            '-vvv',
            # Disable prompting for input
            '--no-input',
            # Disable the cache
            '--no-cache-dir',
            # Don't check to see if pip is up-to-date
            '--disable-pip-version-check',
            # Install packages
            'install',
            # Don't install package dependencies
            '--no-deps',
            # Overwrite existing packages
            '--ignore-installed',
            # Use env vars like PYTHONPATH
            '--no-build-isolation',
            # Don't warn that prefix.bin is not in PATH
            '--no-warn-script-location',
            # Ignore the PyPI package index
            '--no-index',
        ]

    @property
    def homepage(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/project/' + name + '/'

    @property
    def url(self):
        if self.pypi:
            return ('https://files.pythonhosted.org/packages/source/' +
                    self.pypi[0] + '/' + self.pypi)

    @property
    def list_url(self):
        if self.pypi:
            name = self.pypi.split('/')[0]
            return 'https://pypi.org/simple/' + name + '/'

    @property
    def import_modules(self):
        """Names of modules that the Python package provides.

        These are used to test whether or not the installation succeeded.
        These names generally come from running:

        .. code-block:: python

           >>> import setuptools
           >>> setuptools.find_packages()

        in the source tarball directory. If the module names are incorrectly
        detected, this property can be overridden by the package.

        Returns:
            list: list of strings of module names
        """
        modules = []
        pkg = self.spec['python'].package

        # Packages may be installed in platform-specific or platform-independent
        # site-packages directories
        for directory in {pkg.platlib, pkg.purelib}:
            root = os.path.join(self.prefix, directory)

            # Some Python libraries are packages: collections of modules
            # distributed in directories containing __init__.py files
            for path in find(root, '__init__.py', recursive=True):
                modules.append(
                    path.replace(root + os.sep, '',
                                 1).replace(os.sep + '__init__.py',
                                            '').replace('/', '.'))

            # Some Python libraries are modules: individual *.py files
            # found in the site-packages directory
            for path in find(root, '*.py', recursive=False):
                modules.append(
                    path.replace(root + os.sep, '',
                                 1).replace('.py', '').replace('/', '.'))

        modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]

        tty.debug('Detected the following modules: {0}'.format(modules))

        return modules

    @property
    def build_directory(self):
        """The root directory of the Python package.

        This is usually the directory containing one of the following files:

        * ``pyproject.toml``
        * ``setup.cfg``
        * ``setup.py``
        """
        return self.stage.source_path

    def install_options(self, spec, prefix):
        """Extra arguments to be supplied to the setup.py install command."""
        return []

    def global_options(self, spec, prefix):
        """Extra global options to be supplied to the setup.py call before the install
        or bdist_wheel command."""
        return []

    def install(self, spec, prefix):
        """Install everything from build directory."""

        args = PythonPackage._std_args(self) + ['--prefix=' + prefix]

        for option in self.install_options(spec, prefix):
            args.append('--install-option=' + option)
        for option in self.global_options(spec, prefix):
            args.append('--global-option=' + option)

        if self.stage.archive_file and self.stage.archive_file.endswith(
                '.whl'):
            args.append(self.stage.archive_file)
        else:
            args.append('.')

        pip = inspect.getmodule(self).pip
        with working_dir(self.build_directory):
            pip(*args)

    # Testing

    def test(self):
        """Attempts to import modules of the installed package."""

        # Make sure we are importing the installed modules,
        # not the ones in the source directory
        for module in self.import_modules:
            self.run_test(inspect.getmodule(self).python.path,
                          ['-c', 'import {0}'.format(module)],
                          purpose='checking import of {0}'.format(module),
                          work_dir='spack-test')

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    def view_file_conflicts(self, view, merge_map):
        """Report all file conflicts, excepting special cases for python.
           Specifically, this does not report errors for duplicate
           __init__.py files for packages in the same namespace.
        """
        conflicts = list(dst for src, dst in merge_map.items()
                         if os.path.exists(dst))

        if conflicts and self.py_namespace:
            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
            namespaces = set(x.package.py_namespace for x in ext_map.values())
            namespace_re = (r'site-packages/{0}/__init__.py'.format(
                self.py_namespace))
            find_namespace = match_predicate(namespace_re)
            if self.py_namespace in namespaces:
                conflicts = list(x for x in conflicts if not find_namespace(x))

        return conflicts

    def add_files_to_view(self, view, merge_map):
        bin_dir = self.spec.prefix.bin
        python_prefix = self.extendee_spec.prefix
        python_is_external = self.extendee_spec.external
        global_view = same_path(python_prefix,
                                view.get_projection_for_spec(self.spec))
        for src, dst in merge_map.items():
            if os.path.exists(dst):
                continue
            elif global_view or not path_contains_subdirectory(src, bin_dir):
                view.link(src, dst)
            elif not os.path.islink(src):
                shutil.copy2(src, dst)
                is_script = 'script' in get_filetype(src)
                if is_script and not python_is_external:
                    filter_file(
                        python_prefix,
                        os.path.abspath(view.get_projection_for_spec(
                            self.spec)), dst)
            else:
                orig_link_target = os.path.realpath(src)
                new_link_target = os.path.abspath(merge_map[orig_link_target])
                view.link(new_link_target, dst)

    def remove_files_from_view(self, view, merge_map):
        ignore_namespace = False
        if self.py_namespace:
            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
            remaining_namespaces = set(spec.package.py_namespace
                                       for name, spec in ext_map.items()
                                       if name != self.name)
            if self.py_namespace in remaining_namespaces:
                namespace_init = match_predicate(
                    r'site-packages/{0}/__init__.py'.format(self.py_namespace))
                ignore_namespace = True

        bin_dir = self.spec.prefix.bin
        global_view = (
            self.extendee_spec.prefix == view.get_projection_for_spec(
                self.spec))

        to_remove = []
        for src, dst in merge_map.items():
            if ignore_namespace and namespace_init(dst):
                continue

            if global_view or not path_contains_subdirectory(src, bin_dir):
                to_remove.append(dst)
            else:
                os.remove(dst)

        view.remove_files(to_remove)
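
A minimal sketch of a package built on top of this class; the project name, version, checksum and dependencies are placeholders rather than a real PyPI entry.

class PyExampletool(PythonPackage):
    """Hypothetical pip-installed package, shown only to illustrate the class."""

    # Drives the homepage, url and list_url properties defined above.
    pypi = 'exampletool/exampletool-1.2.3.tar.gz'

    # Placeholder checksum; a real package would pin the actual sha256.
    version('1.2.3', sha256='0' * 64)

    depends_on('py-setuptools', type='build')
    depends_on('py-numpy', type=('build', 'run'))
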
Example #30
class SIPPackage(PackageBase):
    """Specialized class for packages that are built using the
    SIP build system. See https://www.riverbankcomputing.com/software/sip/intro
    for more information.

    This class provides the following phases that can be overridden:

    * configure
    * build
    * install

    The configure phase already adds a set of default flags. To see more
    options, run ``python configure.py --help``.
    """
    # Default phases
    phases = ['configure', 'build', 'install']

    # To be used in UI queries that need to know which
    # build-system class we are using
    build_system_class = 'SIPPackage'

    #: Name of private sip module to install alongside package
    sip_module = 'sip'

    #: Callback names for install-time test
    install_time_test_callbacks = ['import_module_test']

    extends('python')

    depends_on('qt')
    depends_on('py-sip')

    def python(self, *args, **kwargs):
        """The python ``Executable``."""
        inspect.getmodule(self).python(*args, **kwargs)

    def configure_file(self):
        """Returns the name of the configure file to use."""
        return 'configure.py'

    def configure(self, spec, prefix):
        """Configure the package."""
        configure = self.configure_file()

        args = self.configure_args()

        python_include_dir = 'python' + str(spec['python'].version.up_to(2))

        args.extend([
            '--verbose',
            '--confirm-license',
            '--qmake',
            spec['qt'].prefix.bin.qmake,
            '--sip',
            spec['py-sip'].prefix.bin.sip,
            '--sip-incdir',
            join_path(spec['py-sip'].prefix.include, python_include_dir),
            '--bindir',
            prefix.bin,
            '--destdir',
            inspect.getmodule(self).site_packages_dir,
        ])

        self.python(configure, *args)

    def configure_args(self):
        """Arguments to pass to configure."""
        return []

    def build(self, spec, prefix):
        """Build the package."""
        args = self.build_args()

        inspect.getmodule(self).make(*args)

    def build_args(self):
        """Arguments to pass to build."""
        return []

    def install(self, spec, prefix):
        """Install the package."""
        args = self.install_args()

        inspect.getmodule(self).make('install', parallel=False, *args)

    def install_args(self):
        """Arguments to pass to install."""
        return []

    # Testing

    def import_module_test(self):
        """Attempts to import the module that was just installed.

        This test is only run if the package overrides
        :py:attr:`import_modules` with a list of module names."""

        # Make sure we are importing the installed modules,
        # not the ones in the current directory
        with working_dir('spack-test', create=True):
            for module in self.import_modules:
                self.python('-c', 'import {0}'.format(module))

    run_after('install')(PackageBase._run_default_install_time_test_callbacks)

    # Check that self.prefix is there after installation
    run_after('install')(PackageBase.sanity_check_prefix)

    @run_after('install')
    def extend_path_setup(self):
        # See github issue #14121 and PR #15297
        module = self.spec['py-sip'].variants['module'].value
        if module != 'sip':
            module = module.split('.')[0]
            with working_dir(inspect.getmodule(self).site_packages_dir):
                with open(os.path.join(module, '__init__.py'), 'a') as f:
                    f.write('from pkgutil import extend_path\n')
                    f.write('__path__ = extend_path(__path__, __name__)\n')
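
Finally, a short sketch of how a package opts into import_module_test(): it overrides import_modules with the names to be imported after installation. The class and module names are invented.

class PyBarWidgets(SIPPackage):
    """Hypothetical SIP package, shown only to illustrate the install-time test."""

    # import_module_test() iterates over this list after installation;
    # the module names here are illustrative.
    import_modules = ['barwidgets', 'barwidgets.core']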