Example #1
def decompressor_for(path):
    """Get the appropriate decompressor for a path."""
    if path.endswith(".zip"):
        unzip = which('unzip', required=True)
        return unzip
    tar = which('tar', required=True)
    tar.add_default_arg('-xf')
    return tar
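
A minimal usage sketch (not part of the original example), assuming which() returns a callable Executable wrapper as in the other snippets on this page; the archive path is a hypothetical placeholder:

# Illustrative only: choose a decompressor and run it on a hypothetical archive.
archive = 'example-1.0.tar.gz'
decompressor = decompressor_for(archive)
# The Executable is invoked like a function; for tar, the '-xf' default
# argument added above is prepended automatically.
decompressor(archive)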
Example #2
    def __call__(self, stage, url):
        """Try to guess the type of build system used by a project based on
        the contents of its archive or the URL it was downloaded from."""

        # Most octave extensions are hosted on Octave-Forge:
        #     http://octave.sourceforge.net/index.html
        # They all have the same base URL.
        if 'downloads.sourceforge.net/octave/' in url:
            self.build_system = 'octave'
            return

        # A list of clues that give us an idea of the build system a package
        # uses. If the regular expression matches a file contained in the
        # archive, the corresponding build system is assumed.
        # NOTE: Order is important here. If a package supports multiple
        # build systems, we choose the first match in this list.
        clues = [
            (r'/CMakeLists\.txt$',    'cmake'),
            (r'/configure$',          'autotools'),
            (r'/configure\.(in|ac)$', 'autoreconf'),
            (r'/Makefile\.am$',       'autoreconf'),
            (r'/SConstruct$',         'scons'),
            (r'/waf$',                'waf'),
            (r'/setup\.py$',          'python'),
            (r'/NAMESPACE$',          'r'),
            (r'/WORKSPACE$',          'bazel'),
            (r'/Build\.PL$',          'perlbuild'),
            (r'/Makefile\.PL$',       'perlmake'),
            (r'/.*\.pro$',            'qmake'),
            (r'/(GNU)?[Mm]akefile$',  'makefile'),
            (r'/DESCRIPTION$',        'octave'),
            (r'/meson\.build$',       'meson'),
        ]

        # Peek inside the compressed file.
        if stage.archive_file.endswith('.zip'):
            try:
                unzip  = which('unzip')
                output = unzip('-lq', stage.archive_file, output=str)
            except ProcessError:
                output = ''
        else:
            try:
                tar    = which('tar')
                output = tar('--exclude=*/*/*', '-tf',
                             stage.archive_file, output=str)
            except ProcessError:
                output = ''
        lines = output.split('\n')

        # Determine the build system based on the files contained
        # in the archive.
        for pattern, bs in clues:
            if any(re.search(pattern, l) for l in lines):
                self.build_system = bs
                break
Example #3
def decompressor_for(path, extension=None):
    """Get the appropriate decompressor for a path."""
    if ((extension and re.match(r'\.?zip$', extension)) or
            path.endswith('.zip')):
        unzip = which('unzip', required=True)
        unzip.add_default_arg('-q')
        return unzip
    if extension and re.match(r'gz', extension):
        gunzip = which('gunzip', required=True)
        return gunzip
    tar = which('tar', required=True)
    tar.add_default_arg('-xf')
    return tar
Example #4
def get_origin_url():
    git_dir = join_path(spack.prefix, '.git')
    git = which('git', required=True)
    origin_url = git(
        '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
        return_output=True)
    return origin_url.strip()
Example #5
def _debug_tarball_suffix():
    now = datetime.now()
    suffix = now.strftime('%Y-%m-%d-%H%M%S')

    git = which('git')
    if not git:
        return 'nobranch-nogit-%s' % suffix

    with working_dir(spack.paths.prefix):
        if not os.path.isdir('.git'):
            return 'nobranch.nogit.%s' % suffix

        # Get symbolic branch name and strip any special chars (mainly '/')
        symbolic = git(
            'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
        symbolic = re.sub(r'[^\w.-]', '-', symbolic)

        # Get the commit hash too.
        commit = git(
            'rev-parse', '--short', 'HEAD', output=str).strip()

        if symbolic == commit:
            return "nobranch.%s.%s" % (commit, suffix)
        else:
            return "%s.%s.%s" % (symbolic, commit, suffix)
Example #6
    def __init__(self):
        self.tempdir = tempfile.mkdtemp()

        self.directory = os.path.join(self.tempdir, 'dir')
        mkdirp(self.directory)

        # Script with short shebang
        self.short_shebang = os.path.join(self.tempdir, 'short')
        with open(self.short_shebang, 'w') as f:
            f.write(short_line)
            f.write(last_line)

        # Script with long shebang
        self.long_shebang = os.path.join(self.tempdir, 'long')
        with open(self.long_shebang, 'w') as f:
            f.write(long_line)
            f.write(last_line)

        # Lua script with long shebang
        self.lua_shebang = os.path.join(self.tempdir, 'lua')
        with open(self.lua_shebang, 'w') as f:
            f.write(lua_line)
            f.write(last_line)

        # Script already using sbang.
        self.has_sbang = os.path.join(self.tempdir, 'shebang')
        with open(self.has_sbang, 'w') as f:
            f.write(sbang_line)
            f.write(long_line)
            f.write(last_line)

        # Fake binary file.
        self.binary = os.path.join(self.tempdir, 'binary')
        tar = which('tar', required=True)
        tar('czf', self.binary, self.has_sbang)
Example #7
def test_create_db_tarball(tmpdir, database):
    with tmpdir.as_cwd():
        debug('create-db-tarball')

        # get the first non-dotfile to avoid coverage files in the directory
        files = os.listdir(os.getcwd())
        tarball_name = next(f for f in files if not f.startswith('.'))

        # debug command made an archive
        assert os.path.exists(tarball_name)

        # print contents of archive
        tar = which('tar')
        contents = tar('tzf', tarball_name, output=str)

        # DB file is included
        assert 'index.json' in contents

        # spec.yamls from all installs are included
        for spec in database.query():
            # externals won't have a spec.yaml
            if spec.external:
                continue

            spec_suffix = '%s/.spack/spec.yaml' % spec.dag_hash()
            assert spec_suffix in contents
Example #8
    def setUp(self):
        """This sets up a mock archive to fetch, and a mock temp space for use
           by the Stage class.  It doesn't actually create the Stage -- that
           is done by individual tests.
        """
        if os.path.exists(test_files_dir):
            shutil.rmtree(test_files_dir)

        mkdirp(test_files_dir)
        mkdirp(archive_dir_path)
        mkdirp(test_tmp_path)

        with closing(open(test_readme, 'w')) as readme:
            readme.write(readme_text)

        with working_dir(test_files_dir):
            tar = which('tar')
            tar('czf', archive_name, archive_dir)

        # Make spack use the test environment for tmp stuff.
        self.old_tmp_dirs = spack.tmp_dirs
        spack.tmp_dirs = [test_tmp_path]

        # record this since this test changes to directories that will
        # be removed.
        self.working_dir = os.getcwd()
Example #9
    def __call__(self, stage):
        """Try to guess the type of build system used by the project, and return
           an appropriate configure line.
        """
        autotools = "configure('--prefix=%s' % prefix)"
        cmake     = "cmake('.', *std_cmake_args)"
        python    = "python('setup.py', 'install', '--prefix=%s' % prefix)"

        config_lines = ((r'/configure$',      'autotools', autotools),
                        (r'/CMakeLists.txt$', 'cmake',     cmake),
                        (r'/setup.py$',       'python',    python))

        # Peek inside the tarball.
        tar = which('tar')
        output = tar(
            "--exclude=*/*/*", "-tf", stage.archive_file, return_output=True)
        lines = output.split("\n")

        # Set the configure line to the one that matched.
        for pattern, bs, cl in config_lines:
            if any(re.search(pattern, l) for l in lines):
                config_line = cl
                build_system = bs
                break
        else:
            # None matched -- just put both, with cmake commented out
            config_line =  "# FIXME: Spack couldn't guess one, so here are some options:\n"
            config_line += "        # " + autotools + "\n"
            config_line += "        # " + cmake
            build_system = 'unknown'

        self.configure = config_line
        self.build_system = build_system
Example #10
    def setUp(self):
        super(InstallTest, self).setUp()

        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False
Example #11
def bootstrap(parser, args):
    origin_url, branch = get_origin_info(args.remote)
    prefix = args.prefix

    tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))

    if os.path.isfile(prefix):
        tty.die("There is already a file at %s" % prefix)

    mkdirp(prefix)

    if os.path.exists(join_path(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there! "
                "Delete these files before boostrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    os.chdir(prefix)
    git = which('git', required=True)
    git('init', '--shared', '-q')
    git('remote', 'add', 'origin', origin_url)
    git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
                           '-n', '-q')
    git('reset', '--hard', 'origin/%s' % branch, '-q')
    git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

    tty.msg("Successfully created a new spack in %s" % prefix,
            "Run %s/bin/spack to use this installation." % prefix)
Example #12
    def _default_target_from_env(self):
        '''Set and return the default CrayPE target loaded in a clean login
        session.

        A bash subshell is launched with a wiped environment and the list of
        loaded modules is parsed for the first acceptable CrayPE target.
        '''
        # Based on the incantation:
        # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
        if getattr(self, 'default', None) is None:
            env = which('env')
            env.add_default_arg('-')
            # CAUTION - $USER is generally needed in the sub-environment.
            # There may be other variables needed for general success.
            output = env('USER=%s' % os.environ['USER'],
                         'HOME=%s' % os.environ['HOME'],
                         '/bin/bash', '--noprofile', '--norc', '-c',
                         '. /etc/profile; module list -lt',
                         output=str, error=str)
            self._defmods = _get_modules_in_modulecmd_output(output)
            targets = []
            _fill_craype_targets_from_modules(targets, self._defmods)
            self.default = targets[0] if targets else None
            tty.debug("Found default modules:",
                      *["     %s" % mod for mod in self._defmods])
        return self.default
Example #13
    def setUp(self):
        super(InstallTest, self).setUp()

        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False

        # Use a fake install directory to avoid conflicts between
        # installed packages and mock packages.
        self.tmpdir = tempfile.mkdtemp()
        self.orig_layout = spack.install_layout
        spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)
Example #14
    def __call__(self, stage, url):
        """Try to guess the type of build system used by a project based on
        the contents of its archive or the URL it was downloaded from."""

        # Most octave extensions are hosted on Octave-Forge:
        #     http://octave.sourceforge.net/index.html
        # They all have the same base URL.
        if 'downloads.sourceforge.net/octave/' in url:
            self.build_system = 'octave'
            return

        # A list of clues that give us an idea of the build system a package
        # uses. If the regular expression matches a file contained in the
        # archive, the corresponding build system is assumed.
        clues = [
            (r'/configure$',      'autotools'),
            (r'/CMakeLists.txt$', 'cmake'),
            (r'/SConstruct$',     'scons'),
            (r'/setup.py$',       'python'),
            (r'/NAMESPACE$',      'r'),
            (r'/WORKSPACE$',      'bazel')
        ]

        # Peek inside the compressed file.
        if stage.archive_file.endswith('.zip'):
            try:
                unzip  = which('unzip')
                output = unzip('-lq', stage.archive_file, output=str)
            except:
                output = ''
        else:
            try:
                tar    = which('tar')
                output = tar('--exclude=*/*/*', '-tf',
                             stage.archive_file, output=str)
            except:
                output = ''
        lines = output.split('\n')

        # Determine the build system based on the files contained
        # in the archive.
        build_system = 'generic'
        for pattern, bs in clues:
            if any(re.search(pattern, l) for l in lines):
                build_system = bs

        self.build_system = build_system
Example #15
def set_module_variables_for_package(pkg):
    """Populate the module scope of install() with some useful functions.
       This makes things easier for package writers.
    """
    m = pkg.module

    # number of jobs spack will build with.
    jobs = multiprocessing.cpu_count()
    if not pkg.parallel:
        jobs = 1
    elif pkg.make_jobs:
        jobs = pkg.make_jobs
    m.make_jobs = jobs

    # TODO: make these build deps that can be installed if not found.
    m.make = MakeExecutable("make", jobs)
    m.gmake = MakeExecutable("gmake", jobs)

    # easy shortcut to os.environ
    m.env = os.environ

    # Find the configure script in the archive path
    # Don't use which for this; we want to find it in the current dir.
    m.configure = Executable("./configure")

    # TODO: shouldn't really use "which" here.  Consider adding notion
    # TODO: of build dependencies, as opposed to link dependencies.
    # TODO: Currently, everything is a link dependency, but tools like
    # TODO: this shouldn't be.
    m.cmake = which("cmake")

    # standard CMake arguments
    m.std_cmake_args = ["-DCMAKE_INSTALL_PREFIX=%s" % pkg.prefix, "-DCMAKE_BUILD_TYPE=RelWithDebInfo"]
    if platform.mac_ver()[0]:
        m.std_cmake_args.append("-DCMAKE_FIND_FRAMEWORK=LAST")

    # Set up CMake rpath
    m.std_cmake_args.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE")
    m.std_cmake_args.append("-DCMAKE_INSTALL_RPATH=%s" % ":".join(get_rpaths(pkg)))

    # Emulate some shell commands for convenience
    m.pwd = os.getcwd
    m.cd = os.chdir
    m.mkdir = os.mkdir
    m.makedirs = os.makedirs
    m.remove = os.remove
    m.removedirs = os.removedirs
    m.symlink = os.symlink

    m.mkdirp = mkdirp
    m.install = install
    m.install_tree = install_tree
    m.rmtree = shutil.rmtree
    m.move = shutil.move

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
    m.prefix = pkg.prefix
Example #16
    def configure(self, spec, prefix):
        aspell = spec['aspell'].prefix.bin.aspell
        prezip = spec['aspell'].prefix.bin.prezip
        destdir = prefix

        sh = which('sh')
        sh('./configure', '--vars', "ASPELL={0}".format(aspell),
           "PREZIP={0}".format(prezip),
           "DESTDIR={0}".format(destdir))
Example #17
def list_packages(rev):
    pkgpath = os.path.join(spack.paths.packages_path, 'packages')
    relpath = pkgpath[len(spack.paths.prefix + os.path.sep):] + os.path.sep

    git = which('git', required=True)
    with working_dir(spack.paths.prefix):
        output = git('ls-tree', '--full-tree', '--name-only', rev, relpath,
                     output=str)
    return sorted(line[len(relpath):] for line in output.split('\n') if line)
Example #18
def pkg_add(args):
    for pkg_name in args.packages:
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s.  Path does not exist:" % pkg_name,
                    filename)

        git = which('git', required=True)
        with working_dir(spack.paths.prefix):
            git('-C', spack.paths.packages_path, 'add', filename)
Example #19
    def git(self):
        if not self._git:
            self._git = which('git', required=True)

            # If the user asked for insecure fetching, make that work
            # with git as well.
            if not spack.config.get('config:verify_ssl'):
                self._git.add_default_env('GIT_SSL_NO_VERIFY', 'true')

        return self._git
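
A condensed sketch (not from the original source) of the lazy-initialization pattern used here, assuming the Executable returned by which() supports add_default_env as shown above; the GitRunner holder class and its verify_ssl flag are illustrative stand-ins for the surrounding fetcher and the spack.config lookup:

class GitRunner(object):                  # hypothetical holder class
    def __init__(self, verify_ssl=True):
        self._git = None
        self.verify_ssl = verify_ssl      # stand-in for the config lookup

    @property
    def git(self):
        # Locate git only once, on first use, and cache the Executable.
        if self._git is None:
            self._git = which('git', required=True)
            if not self.verify_ssl:
                # Make insecure fetching work with git as well.
                self._git.add_default_env('GIT_SSL_NO_VERIFY', 'true')
        return self._git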
Example #20
def test_which_relative_path_with_slash(tmpdir, working_env):
    tmpdir.ensure("exe")
    path = str(tmpdir.join("exe"))

    os.environ['PATH'] = ''

    with tmpdir.as_cwd():
        no_exe = ex.which('.{0}exe'.format(os.path.sep))
        assert no_exe is None
        if sys.platform == "win32":
            # These checks are for 'executable' files, Windows
            # determines this by file extension.
            path += ".exe"
            tmpdir.ensure('exe.exe')
        else:
            fs.set_executable(path)

        exe = ex.which('.{0}exe'.format(os.path.sep))
        assert exe.path == path
Example #21
    def _avail_targets(self):
        '''Return a list of available CrayPE CPU targets.'''
        if getattr(self, '_craype_targets', None) is None:
            module = which('modulecmd', required=True)
            module.add_default_arg('python')
            output = module('avail', '-t', 'craype-', output=str, error=str)
            craype_modules = _get_modules_in_modulecmd_output(output)
            self._craype_targets = targets = []
            _fill_craype_targets_from_modules(targets, craype_modules)
        return self._craype_targets
Example #22
    def git(self):
        if not self._git:
            self._git = which('git', required=True)

            # If the user asked for insecure fetching, make that work
            # with git as well.
            if not spack.config.get('config:verify_ssl'):
                self._git.add_default_env('GIT_SSL_NO_VERIFY', 'true')

        return self._git
Example #23
def has_develop_branch():
    git = which('git')
    if not git:
        return False
    git("show-ref",
        "--verify",
        "--quiet",
        "refs/heads/develop",
        fail_on_error=False)
    return git.returncode == 0
Example #24
def pkg_add(args):
    for pkg_name in args.packages:
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s.  Path does not exist:" %
                    pkg_name, filename)

        git = which('git', required=True)
        with working_dir(spack.paths.prefix):
            git('-C', spack.paths.packages_path, 'add', filename)
Example #25
    def _avail_targets(self):
        '''Return a list of available CrayPE CPU targets.'''
        if getattr(self, '_craype_targets', None) is None:
            module = which('modulecmd', required=True)
            module.add_default_arg('python')
            output = module('avail', '-t', 'craype-', output=str, error=str)
            craype_modules = _get_modules_in_modulecmd_output(output)
            self._craype_targets = targets = []
            _fill_craype_targets_from_modules(targets, craype_modules)
        return self._craype_targets
Example #26
    def __init__(self):
        self.tempdir = tempfile.mkdtemp()

        self.directory = os.path.join(self.tempdir, 'dir')
        mkdirp(self.directory)

        # Script with short shebang
        self.short_shebang = os.path.join(self.tempdir, 'short')
        with open(self.short_shebang, 'w') as f:
            f.write(short_line)
            f.write(last_line)

        # Script with long shebang
        self.long_shebang = os.path.join(self.tempdir, 'long')
        with open(self.long_shebang, 'w') as f:
            f.write(long_line)
            f.write(last_line)

        # Lua script with long shebang
        self.lua_shebang = os.path.join(self.tempdir, 'lua')
        with open(self.lua_shebang, 'w') as f:
            f.write(lua_line)
            f.write(last_line)

        # Script with a Lua-style shebang in the body text, not on the first line
        self.lua_textbang = os.path.join(self.tempdir, 'lua_in_text')
        with open(self.lua_textbang, 'w') as f:
            f.write(short_line)
            f.write(lua_in_text)
            f.write(last_line)

        # Node script with long shebang
        self.node_shebang = os.path.join(self.tempdir, 'node')
        with open(self.node_shebang, 'w') as f:
            f.write(node_line)
            f.write(last_line)

        # Script with a Node-style shebang in the body text, not on the first line
        self.node_textbang = os.path.join(self.tempdir, 'node_in_text')
        with open(self.node_textbang, 'w') as f:
            f.write(short_line)
            f.write(node_in_text)
            f.write(last_line)

        # Script already using sbang.
        self.has_sbang = os.path.join(self.tempdir, 'shebang')
        with open(self.has_sbang, 'w') as f:
            f.write(sbang_line)
            f.write(long_line)
            f.write(last_line)

        # Fake binary file.
        self.binary = os.path.join(self.tempdir, 'binary')
        tar = which('tar', required=True)
        tar('czf', self.binary, self.has_sbang)
Example #27
def write_spconfig(package, dirty):
    # Set-up the environment
    spack.build_environment.setup_package(package, dirty)

    cmd = [str(which('cmake'))] + package.std_cmake_args + package.cmake_args()

    env = dict()

    paths = os.environ['PATH'].split(':')
    paths = [item for item in paths if 'spack/env' not in item]
    env['PATH'] = ':'.join(paths)
    env['SPACK_TRANSITIVE_INCLUDE_PATH'] = spack_transitive_include_path()
    env['CMAKE_PREFIX_PATH'] = os.environ['CMAKE_PREFIX_PATH']
    env['CC'] = os.environ['SPACK_CC']
    env['CXX'] = os.environ['SPACK_CXX']
    env['FC'] = os.environ['SPACK_FC']

    setup_fname = 'spconfig.py'
    with open(setup_fname, 'w') as fout:
        fout.write(r"""#!%s
#

import sys
import os
import subprocess

def cmdlist(str):
    return list(x.strip().replace("'",'') for x in str.split('\n') if x)
env = dict(os.environ)
""" % sys.executable)

        env_vars = sorted(list(env.keys()))
        for name in env_vars:
            val = env[name]
            if 'PATH' not in name:
                fout.write('env[%s] = %s\n' % (repr(name), repr(val)))
            else:
                if name == 'SPACK_TRANSITIVE_INCLUDE_PATH':
                    sep = ';'
                else:
                    sep = ':'

                fout.write('env[%s] = "%s".join(cmdlist("""\n' %
                           (repr(name), sep))
                for part in val.split(sep):
                    fout.write('    %s\n' % part)
                fout.write('"""))\n')

        fout.write('\ncmd = cmdlist("""\n')
        fout.write('%s\n' % cmd[0])
        for arg in cmd[1:]:
            fout.write('    %s\n' % arg)
        fout.write('""") + sys.argv[1:]\n')
        fout.write('\nproc = subprocess.Popen(cmd, env=env)\nproc.wait()\n')
        set_executable(setup_fname)
Example #28
def set_module_variables_for_package(pkg):
    """Populate the module scope of install() with some useful functions.
       This makes things easier for package writers.
    """
    m = pkg.module

    m.make = MakeExecutable('make', pkg.parallel)
    m.gmake = MakeExecutable('gmake', pkg.parallel)

    # easy shortcut to os.environ
    m.env = os.environ

    # number of jobs spack prefers to build with.
    m.make_jobs = multiprocessing.cpu_count()

    # Find the configure script in the archive path
    # Don't use which for this; we want to find it in the current dir.
    m.configure = Executable('./configure')

    # TODO: shouldn't really use "which" here.  Consider adding notion
    # TODO: of build dependencies, as opposed to link dependencies.
    # TODO: Currently, everything is a link dependency, but tools like
    # TODO: this shouldn't be.
    m.cmake = which("cmake")

    # standard CMake arguments
    m.std_cmake_args = [
        '-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
        '-DCMAKE_BUILD_TYPE=RelWithDebInfo'
    ]
    if platform.mac_ver()[0]:
        m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')

    # Set up CMake rpath
    m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
    m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' %
                            ":".join(get_rpaths(pkg)))

    # Emulate some shell commands for convenience
    m.pwd = os.getcwd
    m.cd = os.chdir
    m.mkdir = os.mkdir
    m.makedirs = os.makedirs
    m.remove = os.remove
    m.removedirs = os.removedirs
    m.symlink = os.symlink

    m.mkdirp = mkdirp
    m.install = install
    m.rmtree = shutil.rmtree
    m.move = shutil.move

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
    m.prefix = pkg.prefix
Example #29
    def apply(self, stage):
        """Apply the patch at self.path to the source code in the
        supplied stage

        Args:
            stage: stage for the package that needs to be patched
        """
        stage.chdir_to_source()
        # Use -N to allow the same patches to be applied multiple times.
        _patch = which("patch", required=True)
        _patch('-s', '-p', str(self.level), '-i', self.path)
Example #30
def _patchelf():
    """Return the full path to the patchelf binary, if available, else None."""
    if is_macos:
        return None

    patchelf = executable.which('patchelf')
    if patchelf is None:
        with spack.bootstrap.ensure_bootstrap_configuration():
            patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()

    return patchelf.path
Example #31
    def apply(self, stage):
        """Apply the patch at self.path to the source code in the
        supplied stage

        Args:
            stage: stage for the package that needs to be patched
        """
        stage.chdir_to_source()
        # Use -N to allow the same patches to be applied multiple times.
        _patch = which("patch", required=True)
        _patch('-s', '-p', str(self.level), '-i', self.path)
Example #32
def test_which(tmpdir):
    os.environ["PATH"] = str(tmpdir)
    assert ex.which("spack-test-exe") is None

    with pytest.raises(ex.CommandNotFoundError):
        ex.which("spack-test-exe", required=True)

    path = str(tmpdir.join("spack-test-exe"))

    with tmpdir.as_cwd():
        if sys.platform == "win32":
            # For Windows, need to create files with .exe after any assert is none tests
            tmpdir.ensure("spack-test-exe.exe")
            path += ".exe"
        else:
            fs.touch("spack-test-exe")
            fs.set_executable("spack-test-exe")

        exe = ex.which("spack-test-exe")
        assert exe is not None
        assert exe.path == path
Example #33
    def apply(self, stage):
        """Apply the patch at self.path to the source code in the
        supplied stage

        Args:
            stage: stage for the package that needs to be patched
        """
        patch = which("patch", required=True)
        with working_dir(stage.source_path):
            # Use -N to allow the same patches to be applied multiple times.
            patch('-s', '-p', str(self.level), '-i', self.path, "-d",
                  self.working_dir)
Example #34
def get_module_cmd_from_which():
    module_cmd = which('modulecmd')
    if not module_cmd:
        raise ModuleError('`which` did not find any modulecmd executable')
    module_cmd.add_default_arg('python')

    # Check that the executable works
    module_cmd('list', output=str, error=str, fail_on_error=False)
    if module_cmd.returncode != 0:
        raise ModuleError('get_module_cmd cannot determine the module command')

    return module_cmd
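
The probe pattern above generalizes to any tool: call it once with fail_on_error=False and inspect returncode before trusting it. A hedged sketch (not from the original source), with the tool name and check arguments as placeholders:

def probe_tool(name, *check_args):
    """Return an Executable for name, or None if it is missing or broken."""
    tool = which(name)                 # None when the tool is not on PATH
    if tool is None:
        return None
    # Run a cheap command without raising, then check the exit status.
    tool(*check_args, output=str, error=str, fail_on_error=False)
    return tool if tool.returncode == 0 else None

For instance, probe_tool('git', '--version') would return the git Executable only if it runs cleanly.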
Example #35
def test_versions_from_git(mock_git_version_info, monkeypatch, mock_packages):
    repo_path, filename, commits = mock_git_version_info
    monkeypatch.setattr(spack.package.PackageBase,
                        'git',
                        'file://%s' % repo_path,
                        raising=False)

    for commit in commits:
        spec = spack.spec.Spec('git-test-commit@%s' % commit)
        version = spec.version
        comparator = [
            str(v) if not isinstance(v, int) else v
            for v in version._cmp(version.commit_lookup)
        ]

        with working_dir(repo_path):
            which('git')('checkout', commit)
        with open(os.path.join(repo_path, filename), 'r') as f:
            expected = f.read()

        assert str(comparator) == expected
Example #36
def _system_gunzip(archive_file):
    # Drop the extension (e.g. '.gz') to get the decompressed file name.
    decompressed_file = os.path.splitext(os.path.basename(archive_file))[0]
    working_dir = os.getcwd()
    destination_abspath = os.path.join(working_dir, decompressed_file)
    compressed_file = os.path.basename(archive_file)
    copy_path = os.path.join(working_dir, compressed_file)
    shutil.copy(archive_file, copy_path)
    gzip = which("gzip")
    gzip.add_default_arg("-d")
    gzip(copy_path)
    return destination_abspath
Example #37
def test_log_subproc_and_echo_output(capfd, tmpdir):
    echo = which('echo')

    with log_output('foo.txt') as logger:
        with logger.force_echo():
            echo('echo')
        print('logged')

    assert capfd.readouterr() == ('echo\n', '')

    with open('foo.txt') as f:
        assert f.read() == 'logged\n'
Example #38
    def install(self, spec, prefix):

        # edit cns_solve_environment to allow a build
        shutil.copy('cns_solve_env', 'cns_solve_env.back')
        filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                    f"setenv CNS_SOLVE '{self.stage.source_path}'",
                    'cns_solve_env')

        # Copy over an almost-right machine makefile; we could have taken it
        # from v1.3, but this is simpler.
        src_file = 'instlib/machine/supported/intel-x86_64bit-linux/Makefile.header.2.gfortran'
        dest_file = 'instlib/machine/supported/mac-intel-darwin/Makefile.header.5.gfortran'
        shutil.move(src_file, dest_file)

        if not self.spec.satisfies('%fortran@:10.0.0'):
            # Patch the machine makefile. This can't be done with a patch
            # directive because the file doesn't exist until we copy it, and
            # copying a pre-patched file from the package directory caused a
            # lockup.
            patch = which('patch')
            patch_file = join_path(
                package_root, 'nmrpack/packages/cns',
                'gfortran_10_allow_argument_mismatch.patch')
            patch('-p1', '-i', patch_file)

        if '+aria' in self.spec:
            from_path = pathlib.Path('aria2.3/cns/src')
            to_path = 'source'

            for target_file in from_path.iterdir():
                if target_file.is_file() and target_file.suffix in ('.f',
                                                                    '.inc'):
                    print(f'copying {target_file} to {to_path}')
                    shutil.copy(target_file, to_path)
                if target_file.is_dir():
                    print(f'copying {target_file} to {to_path}')
                    shutil.copytree(target_file,
                                    join_path(to_path, target_file.name))

            shutil.copytree(from_path, 'aria2.3_patches_applied')
            shutil.rmtree('aria2.3')

        make('install')

        install_tree('.', prefix)

        with working_dir(prefix):
            shutil.move('cns_solve_env.back', 'cns_solve_env')

            replacement_env = f" setenv CNS_SOLVE  '{prefix}'"
            filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                        replacement_env, 'cns_solve_env')

        # remove a leftover from our previous edits
        os.remove(pathlib.Path(prefix) / pathlib.Path('cns_solve_env' + '~'))
Example #39
def apply_patch(stage, patch_path, level=1, working_dir='.'):
    """Apply the patch at patch_path to code in the stage.

    Args:
        stage (spack.stage.Stage): stage with code that will be patched
        patch_path (str): filesystem location for the patch to apply
        level (int, optional): patch level (default 1)
        working_dir (str): relative path *within* the stage to change to
            (default '.')
    """
    patch = which("patch", required=True)
    with llnl.util.filesystem.working_dir(stage.source_path):
        patch('-s', '-p', str(level), '-i', patch_path, '-d', working_dir)
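
Assuming a staged source tree, a call might look like the following sketch; the stage object and the patch path are hypothetical placeholders, not from the original example:

# Illustrative only: apply 'fix-build.patch' at -p1 inside the staged sources.
apply_patch(stage, '/path/to/fix-build.patch', level=1, working_dir='.')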
Example #40
def test_log_subproc_and_echo_output_capfd(capfd, tmpdir):
    echo = which('echo')

    # This tests *only* what is echoed when using a subprocess, as capfd
    # interferes with the logged data. See
    # test_log_subproc_and_echo_output_no_capfd for tests on the logfile.
    with tmpdir.as_cwd():
        with log_output('foo.txt') as logger:
            with logger.force_echo():
                echo('echo')
            print('logged')

        assert capfd.readouterr()[0] == "echo\n"
Example #41
def compile_c_and_execute(source_file, include_flags, link_flags):
    """Compile C @p source_file with @p include_flags and @p link_flags,
    run and return the output.
    """
    cc = which('cc')
    flags = include_flags
    flags.extend([source_file])
    cc('-c', *flags)
    name = os.path.splitext(os.path.basename(source_file))[0]
    cc('-o', "check", "%s.o" % name, *link_flags)

    check = Executable('./check')
    return check(output=str)
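
A hedged usage sketch for the helper above (not from the original source); the source file, include paths, and link flags are hypothetical placeholders:

# Illustrative only: build and run a small test program against a library.
output = compile_c_and_execute('smoke_test.c',
                               ['-I/opt/example/include'],
                               ['-L/opt/example/lib', '-lexample'])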
Example #42
def test_log_subproc_output(capsys, tmpdir):
    echo = which('echo')

    # pytest seems to interfere here, so we need to use capsys.disabled()
    # TODO: figure out why this is and whether it means we're doing
    # something wrong with OUR redirects.  Seems like it should work even
    # with capsys enabled.
    with capsys.disabled():
        with log_output('foo.txt'):
            echo('logged')

    with open('foo.txt') as f:
        assert f.read() == 'logged\n'
Example #43
    def hg(self):
        """:returns: The hg executable
        :rtype: Executable
        """
        if not self._hg:
            self._hg = which('hg', required=True)

            # When building PythonPackages, Spack automatically sets
            # PYTHONPATH. This can interfere with hg, which is a Python
            # script. Unset PYTHONPATH while running hg.
            self._hg.add_default_env('PYTHONPATH', '')

        return self._hg
Example #44
def list_packages(rev):
    pkgpath = os.path.join(spack.paths.packages_path, 'packages')
    relpath = pkgpath[len(spack.paths.prefix + os.path.sep):] + os.path.sep

    git = which('git', required=True)
    with working_dir(spack.paths.prefix):
        output = git('ls-tree',
                     '--full-tree',
                     '--name-only',
                     rev,
                     relpath,
                     output=str)
    return sorted(line[len(relpath):] for line in output.split('\n') if line)
Example #45
def test_log_subproc_and_echo_output(capfd, tmpdir):
    echo = which('echo')

    with tmpdir.as_cwd():
        with log_output('foo.txt') as logger:
            with logger.force_echo():
                echo('echo')
            print('logged')

        assert capfd.readouterr() == ('echo\n', '')

        with open('foo.txt') as f:
            assert f.read() == 'logged\n'
Example #46
def merge(parser, args, unknown_args):
    args.run = os.path.abspath(args.run)
    if len(unknown_args) > 0:
        raise ValueError('Unknown arguments: %s' % unknown_args)

    rundeck_dir = os.path.join(args.run, 'rundeck')
    rundeck_R = os.path.join(rundeck_dir, 'rundeck.R')


    git = executable.which('git')
    os.chdir(rundeck_dir)
    git('add', rundeck_R, echo=sys.stdout)
    git('commit', '-m', 'Merged changes after hand edit', echo=sys.stdout)
Example #47
    def hg(self):
        """:returns: The hg executable
        :rtype: Executable
        """
        if not self._hg:
            self._hg = which('hg', required=True)

            # When building PythonPackages, Spack automatically sets
            # PYTHONPATH. This can interfere with hg, which is a Python
            # script. Unset PYTHONPATH while running hg.
            self._hg.add_default_env('PYTHONPATH', '')

        return self._hg
Example #48
def check_mirror():
    with Stage('spack-mirror-test') as stage:
        mirror_root = os.path.join(stage.path, 'test-mirror')
        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        with spack.config.override('mirrors', mirrors):
            with spack.config.override('config:checksum', False):
                specs = [Spec(x).concretized() for x in repos]
                spack.mirror.create(mirror_root, specs)

            # Stage directory exists
            assert os.path.isdir(mirror_root)

            for spec in specs:
                fetcher = spec.package.fetcher[0]
                per_package_ref = os.path.join(
                    spec.name, '-'.join([spec.name, str(spec.version)]))
                mirror_paths = spack.mirror.mirror_archive_paths(
                    fetcher,
                    per_package_ref)
                expected_path = os.path.join(
                    mirror_root, mirror_paths.storage_path)
                assert os.path.exists(expected_path)

            # Now try to fetch each package.
            for name, mock_repo in repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package

                with spack.config.override('config:checksum', False):
                    with pkg.stage:
                        pkg.do_stage(mirror_only=True)

                        # Compare the original repo with the expanded archive
                        original_path = mock_repo.path
                        if 'svn' in name:
                            # have to check out the svn repo to compare.
                            original_path = os.path.join(
                                mock_repo.path, 'checked_out')

                            svn = which('svn', required=True)
                            svn('checkout', mock_repo.url, original_path)

                        dcmp = filecmp.dircmp(
                            original_path, pkg.stage.source_path)

                        # make sure there are no new files in the expanded
                        # tarball
                        assert not dcmp.right_only
                        # and that all original files are present.
                        assert all(left in exclude for left in dcmp.left_only)
Example #49
def check_mirror():
    with Stage('spack-mirror-test') as stage:
        mirror_root = join_path(stage.path, 'test-mirror')
        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        spack.config.update_config('mirrors', mirrors)

        os.chdir(stage.path)
        spack.mirror.create(
            mirror_root, repos, no_checksum=True
        )

        # Stage directory exists
        assert os.path.isdir(mirror_root)

        # check that there are subdirs for each package
        for name in repos:
            subdir = join_path(mirror_root, name)
            assert os.path.isdir(subdir)

            files = os.listdir(subdir)
            assert len(files) == 1

            # Now try to fetch each package.
            for name, mock_repo in repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package

                saved_checksum_setting = spack.do_checksum
                with pkg.stage:
                    # Stage the archive from the mirror and cd to it.
                    spack.do_checksum = False
                    pkg.do_stage(mirror_only=True)

                    # Compare the original repo with the expanded archive
                    original_path = mock_repo.path
                    if 'svn' in name:
                        # have to check out the svn repo to compare.
                        original_path = join_path(
                            mock_repo.path, 'checked_out')

                        svn = which('svn', required=True)
                        svn('checkout', mock_repo.url, original_path)

                    dcmp = filecmp.dircmp(original_path, pkg.stage.source_path)
                    # make sure there are no new files in the expanded
                    # tarball
                    assert not dcmp.right_only
                    # and that all original files are present.
                    assert all(l in exclude for l in dcmp.left_only)
                    spack.do_checksum = saved_checksum_setting
Example #50
def get_module_cmd_from_bash(bashopts=''):
    # Find how the module function is defined in the environment
    module_func = os.environ.get('BASH_FUNC_module()', None)
    if module_func:
        module_func = os.path.expandvars(module_func)
    else:
        module_func_proc = subprocess.Popen(
            ['{0} typeset -f module | '
             'envsubst'.format(bashopts)],
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            executable='/bin/bash',
            shell=True)
        module_func_proc.wait()
        module_func = module_func_proc.stdout.read()

    # Find the portion of the module function that is evaluated
    try:
        find_exec = re.search(r'.*`(.*(?: bash | sh ).*)`.*', module_func)
        exec_line = find_exec.group(1)
    except BaseException:
        try:
            # This will fail with nested parentheses. TODO: expand regex.
            find_exec = re.search(r'.*\(([^()]*(?: bash | sh )[^()]*)\).*',
                                  module_func)
            exec_line = find_exec.group(1)
        except BaseException:
            raise ModuleError('get_module_cmd cannot '
                              'determine the module command from bash')

    # Create an executable
    args = exec_line.split()
    module_cmd = which(args[0])
    if module_cmd:
        for arg in args[1:]:
            if arg in ('bash', 'sh'):
                module_cmd.add_default_arg('python')
                break
            else:
                module_cmd.add_default_arg(arg)
    else:
        raise ModuleError('Could not create executable based on module'
                          ' function.')

    # Check that the executable works
    module_cmd('list', output=str, error=str, fail_on_error=False)
    if module_cmd.returncode != 0:
        raise ModuleError('get_module_cmd cannot determine the module command '
                          'from bash.')

    return module_cmd
Example #51
def compile_c_and_execute(source_file, include_flags, link_flags):
    """Compile C @p source_file with @p include_flags and @p link_flags,
    run and return the output.
    """
    cc = which('cc')
    flags = include_flags
    flags.extend([source_file])
    cc('-c', *flags)
    name = os.path.splitext(os.path.basename(source_file))[0]
    cc('-o', "check", "%s.o" % name,
       *link_flags)

    check = Executable('./check')
    return check(output=str)
Example #52
def test_log_subproc_and_echo_output(capfd, tmpdir):
    echo = which('echo')

    with tmpdir.as_cwd():
        with log_output('foo.txt') as logger:
            with logger.force_echo():
                echo('echo')
            print('logged')

        # Coverage is cluttering stderr during tests
        assert capfd.readouterr()[0] == 'echo\n'

        with open('foo.txt') as f:
            assert f.read() == 'logged\n'
Example #53
def test_log_subproc_output(capsys, tmpdir):
    echo = which('echo')

    # pytest seems to interfere here, so we need to use capsys.disabled()
    # TODO: figure out why this is and whether it means we're doing
    # something wrong with OUR redirects.  Seems like it should work even
    # with capsys enabled.
    with tmpdir.as_cwd():
        with capsys.disabled():
            with log_output('foo.txt'):
                echo('logged')

        with open('foo.txt') as f:
            assert f.read() == 'logged\n'
Example #54
def test_which_with_slash_ignores_path(tmpdir, working_env):
    tmpdir.ensure('exe')
    tmpdir.ensure('bin{0}exe'.format(os.path.sep))

    path = str(tmpdir.join('exe'))
    wrong_path = str(tmpdir.join('bin', 'exe'))
    os.environ['PATH'] = os.path.dirname(wrong_path)

    fs.set_executable(path)
    fs.set_executable(wrong_path)

    with tmpdir.as_cwd():
        exe = ex.which('./exe')
        assert exe.path == path
Example #55
def check_mirror():
    with Stage('spack-mirror-test') as stage:
        mirror_root = join_path(stage.path, 'test-mirror')
        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        spack.config.update_config('mirrors', mirrors)

        os.chdir(stage.path)
        spack.mirror.create(mirror_root, repos, no_checksum=True)

        # Stage directory exists
        assert os.path.isdir(mirror_root)

        # check that there are subdirs for each package
        for name in repos:
            subdir = join_path(mirror_root, name)
            assert os.path.isdir(subdir)

            files = os.listdir(subdir)
            assert len(files) == 1

            # Now try to fetch each package.
            for name, mock_repo in repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package

                saved_checksum_setting = spack.do_checksum
                with pkg.stage:
                    # Stage the archive from the mirror and cd to it.
                    spack.do_checksum = False
                    pkg.do_stage(mirror_only=True)

                    # Compare the original repo with the expanded archive
                    original_path = mock_repo.path
                    if 'svn' in name:
                        # have to check out the svn repo to compare.
                        original_path = join_path(mock_repo.path,
                                                  'checked_out')

                        svn = which('svn', required=True)
                        svn('checkout', mock_repo.url, original_path)

                    dcmp = filecmp.dircmp(original_path, pkg.stage.source_path)
                    # make sure there are no new files in the expanded
                    # tarball
                    assert not dcmp.right_only
                    # and that all original files are present.
                    assert all(l in exclude for l in dcmp.left_only)
                    spack.do_checksum = saved_checksum_setting
Example #56
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, string_types):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        with working_dir(self.stage.path):
            tar('-czf', destination, os.path.basename(self.stage.source_path))
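
A brief usage sketch (not from the original source), assuming the method lives on a fetch strategy whose stage is already expanded; the fetcher name, destination path, and exclude pattern are hypothetical:

# Illustrative only: tar up the expanded source tree, skipping VCS metadata.
fetcher.archive('/tmp/mock-archive.tar.gz', exclude='.git')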
Example #57
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, string_types):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        with working_dir(self.stage.path):
            tar('-czf', destination, os.path.basename(self.stage.source_path))
Example #58
def set_module_variables_for_package(pkg):
    """Populate the module scope of install() with some useful functions.
       This makes things easier for package writers.
    """
    m = pkg.module

    m.make  = MakeExecutable('make', pkg.parallel)
    m.gmake = MakeExecutable('gmake', pkg.parallel)

    # easy shortcut to os.environ
    m.env = os.environ

    # number of jobs spack prefers to build with.
    m.make_jobs = multiprocessing.cpu_count()

    # Find the configure script in the archive path
    # Don't use which for this; we want to find it in the current dir.
    m.configure = Executable('./configure')

    # TODO: shouldn't really use "which" here.  Consider adding notion
    # TODO: of build dependencies, as opposed to link dependencies.
    # TODO: Currently, everything is a link dependency, but tools like
    # TODO: this shouldn't be.
    m.cmake = which("cmake")

    # standard CMake arguments
    m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
                        '-DCMAKE_BUILD_TYPE=RelWithDebInfo']
    if platform.mac_ver()[0]:
        m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')

    # Emulate some shell commands for convenience
    m.pwd        = os.getcwd
    m.cd         = os.chdir
    m.mkdir      = os.mkdir
    m.makedirs   = os.makedirs
    m.remove     = os.remove
    m.removedirs = os.removedirs

    m.mkdirp     = mkdirp
    m.install    = install
    m.rmtree     = shutil.rmtree
    m.move       = shutil.move

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
    m.prefix  = pkg.prefix
Example #59
    def __call__(self, stage):
        """Try to guess the type of build system used by the project, and return
           an appropriate configure line.
        """
        tar = which("tar")
        output = tar("--exclude=*/*/*", "-tf", stage.archive_file, return_output=True)

        autotools = 'configure("--prefix=%s" % prefix)'
        cmake = 'cmake(".", *std_cmake_args)'
        lines = output.split("\n")

        if any(re.search(r"/configure$", l) for l in lines):
            self.configure = autotools
        elif any(re.search(r"/CMakeLists.txt$", l) for l in lines):
            self.configure = cmake
        else:
            # Both, with cmake commented out
            self.configure = "%s\n        # %s" % (autotools, cmake)