Example #1
File: patch.py Project: LLNL/spack
def test_url_patch(mock_stage, filename, sha256, archive_sha256):
    # Make a patch object
    url = 'file://' + filename
    m = sys.modules['spack.patch']
    patch = m.Patch.create(
        None, url, sha256=sha256, archive_sha256=archive_sha256)

    # make a stage
    with Stage(url) as stage:  # TODO: url isn't used; maybe refactor Stage
        # TODO: there is probably a better way to mock this.
        stage.mirror_path = mock_stage  # don't disrupt the spack install

        # fake a source path
        with working_dir(stage.path):
            mkdirp('spack-expanded-archive')

        with working_dir(stage.source_path):
            # write a file to be patched
            with open('foo.txt', 'w') as f:
                f.write("""\
first line
second line
""")
            # write the expected result of patching.
            with open('foo-expected.txt', 'w') as f:
                f.write("""\
zeroth line
first line
third line
""")
        # apply the patch and compare files
        patch.apply(stage)

        with working_dir(stage.source_path):
            assert filecmp.cmp('foo.txt', 'foo-expected.txt')
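Every example on this page uses working_dir from Spack's llnl.util.filesystem module: a context manager that changes into a directory for the duration of a with-block and restores the previous working directory afterwards, even if the block raises. As a rough mental model only (a minimal sketch, not Spack's actual implementation, which also handles details such as the create keyword via mkdirp), it behaves roughly like this:

import os
from contextlib import contextmanager

@contextmanager
def working_dir(dirname, create=False):
    # Sketch: optionally create the directory, chdir into it, and
    # always restore the previous working directory on exit.
    if create:
        os.makedirs(dirname, exist_ok=True)
    previous = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(previous)

Usage then reads exactly like the examples that follow, e.g. "with working_dir(stage.path): ..." runs the body inside the stage directory and returns to the original directory afterwards.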
Example #2
File: debug.py Project: LLNL/spack
def _debug_tarball_suffix():
    now = datetime.now()
    suffix = now.strftime('%Y-%m-%d-%H%M%S')

    git = which('git')
    if not git:
        return 'nobranch-nogit-%s' % suffix

    with working_dir(spack.paths.prefix):
        if not os.path.isdir('.git'):
            return 'nobranch.nogit.%s' % suffix

        # Get symbolic branch name and strip any special chars (mainly '/')
        symbolic = git(
            'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
        symbolic = re.sub(r'[^\w.-]', '-', symbolic)

        # Get the commit hash too.
        commit = git(
            'rev-parse', '--short', 'HEAD', output=str).strip()

        if symbolic == commit:
            return "nobranch.%s.%s" % (commit, suffix)
        else:
            return "%s.%s.%s" % (symbolic, commit, suffix)
Example #3
    def test_existing_dir(self, stage):
        """Test copying to an existing directory."""

        with fs.working_dir(str(stage)):
            fs.copy_tree('source', 'dest')

            assert os.path.exists('dest/a/b/2')
Example #4
File: clone.py Project: LLNL/spack
def clone(parser, args):
    origin_url, branch = get_origin_info(args.remote)
    prefix = args.prefix

    tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))

    if os.path.isfile(prefix):
        tty.die("There is already a file at %s" % prefix)

    mkdirp(prefix)

    if os.path.exists(os.path.join(prefix, '.git')):
        tty.die("There already seems to be a git repository in %s" % prefix)

    files_in_the_way = os.listdir(prefix)
    if files_in_the_way:
        tty.die("There are already files there! "
                "Delete these files before boostrapping spack.",
                *files_in_the_way)

    tty.msg("Installing:",
            "%s/bin/spack" % prefix,
            "%s/lib/spack/..." % prefix)

    with working_dir(prefix):
        git = which('git', required=True)
        git('init', '--shared', '-q')
        git('remote', 'add', 'origin', origin_url)
        git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
            '-n', '-q')
        git('reset', '--hard', 'origin/%s' % branch, '-q')
        git('checkout', '-B', branch, 'origin/%s' % branch, '-q')

        tty.msg("Successfully created a new spack in %s" % prefix,
                "Run %s/bin/spack to use this installation." % prefix)
Example #5
File: blame.py Project: LLNL/spack
def test_blame_file(mock_packages):
    """Sanity check the blame command to make sure it works."""
    with working_dir(spack.paths.prefix):
        out = blame('bin/spack')
    assert 'LAST_COMMIT' in out
    assert 'AUTHOR' in out
    assert 'EMAIL' in out
Example #6
File: meson.py Project: LLNL/spack
 def check(self):
     """Searches the Meson-generated file for the target ``test``
     and runs it if found.
     """
     with working_dir(self.build_directory):
         self._if_ninja_target_execute('test')
         self._if_ninja_target_execute('check')
Example #7
 def check(self):
     """Searches the Makefile for targets ``test`` and ``check``
     and runs them if found.
     """
     with working_dir(self.build_directory):
         self._if_make_target_execute('test')
         self._if_make_target_execute('check')
Example #8
File: cmake.py Project: LLNL/spack
 def install(self, spec, prefix):
     """Make the install targets"""
     with working_dir(self.build_directory):
         if self.generator == 'Unix Makefiles':
             inspect.getmodule(self).make(*self.install_targets)
         elif self.generator == 'Ninja':
             inspect.getmodule(self).ninja(*self.install_targets)
Example #9
File: cmake.py Project: LLNL/spack
 def cmake(self, spec, prefix):
     """Runs ``cmake`` in the build directory"""
     options = [os.path.abspath(self.root_cmakelists_dir)]
     options += self.std_cmake_args
     options += self.cmake_args()
     with working_dir(self.build_directory, create=True):
         inspect.getmodule(self).cmake(*options)
Example #10
    def test_non_existing_dir(self, stage):
        """Test installing to a non-existing directory."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest/sub/directory')

            assert os.path.exists('dest/sub/directory/a/b/2')
Example #11
    def test_symlinks_true(self, stage):
        """Test copying with symlink preservation."""

        with fs.working_dir(str(stage)):
            fs.copy_tree('source', 'dest', symlinks=True)

            assert os.path.exists('dest/2')
            assert os.path.islink('dest/2')

            assert os.path.exists('dest/a/b2')
            with fs.working_dir('dest/a'):
                assert os.path.exists(os.readlink('b2'))

            assert (os.path.realpath('dest/f/2') ==
                    os.path.abspath('dest/a/b/2'))
            assert os.path.realpath('dest/2') == os.path.abspath('dest/1')
Example #12
    def test_dir_dest(self, stage):
        """Test using a directory as the destination."""

        with fs.working_dir(str(stage)):
            fs.copy('source/1', 'dest')

            assert os.path.exists('dest/1')
Example #13
 def is_configure_or_die(self):
     """Checks the presence of a ``configure`` file after the
     autoreconf phase"""
     with working_dir(self.build_directory()):
         if not os.path.exists('configure'):
             raise RuntimeError(
                 'configure script not found in {0}'.format(os.getcwd()))
Example #14
 def autoreconf(self, spec, prefix):
     """Not needed usually, configure should be already there"""
     # If configure exists nothing needs to be done
     if os.path.exists(self.configure_abs_path):
         return
     # Else try to regenerate it
     autotools = ['m4', 'autoconf', 'automake', 'libtool']
     missing = [x for x in autotools if x not in spec]
     if missing:
         msg = 'Cannot generate configure: missing dependencies {0}'
         raise RuntimeError(msg.format(missing))
     tty.msg('Configure script not found: trying to generate it')
     tty.warn('*********************************************************')
     tty.warn('* If the default procedure fails, consider implementing *')
     tty.warn('*        a custom AUTORECONF phase in the package       *')
     tty.warn('*********************************************************')
     with working_dir(self.configure_directory):
         m = inspect.getmodule(self)
         # This part should be redundant in principle, but
         # won't hurt
         m.libtoolize()
         m.aclocal()
         # This line is what is needed most of the time
         # --install, --verbose, --force
         autoreconf_args = ['-ivf']
         if 'pkg-config' in spec:
             autoreconf_args += [
                 '-I',
                 join_path(spec['pkg-config'].prefix, 'share', 'aclocal'),
             ]
         autoreconf_args += self.autoreconf_extra_args
         m.autoreconf(*autoreconf_args)
Example #15
File: meson.py Project: LLNL/spack
 def meson(self, spec, prefix):
     """Runs ``meson`` in the build directory"""
     options = [os.path.abspath(self.root_mesonlists_dir)]
     options += self.std_meson_args
     options += self.meson_args()
     with working_dir(self.build_directory, create=True):
         inspect.getmodule(self).meson(*options)
Example #16
    def install_all(self, args=None):
        """Install all concretized specs in an environment."""

        # Make sure log directory exists
        log_path = self.log_path
        fs.mkdirp(log_path)

        for concretized_hash in self.concretized_order:
            spec = self.specs_by_hash[concretized_hash]

            # Parse cli arguments and construct a dictionary
            # that will be passed to Package.do_install API
            kwargs = dict()
            if args:
                spack.cmd.install.update_kwargs_from_args(args, kwargs)

            with fs.working_dir(self.path):
                spec.package.do_install(**kwargs)

                # Link the resulting log file into logs dir
                build_log_link = os.path.join(
                    log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
                if os.path.exists(build_log_link):
                    os.remove(build_log_link)
                os.symlink(spec.package.build_log_path, build_log_link)
Example #17
def test_merge_to_existing_directory(stage, link_tree):
    with working_dir(stage.path):

        touchp('dest/x')
        touchp('dest/a/b/y')

        link_tree.merge('dest')

        check_file_link('dest/1')
        check_file_link('dest/a/b/2')
        check_file_link('dest/a/b/3')
        check_file_link('dest/c/4')
        check_file_link('dest/c/d/5')
        check_file_link('dest/c/d/6')
        check_file_link('dest/c/d/e/7')

        assert os.path.isfile('dest/x')
        assert os.path.isfile('dest/a/b/y')

        link_tree.unmerge('dest')

        assert os.path.isfile('dest/x')
        assert os.path.isfile('dest/a/b/y')

        assert not os.path.isfile('dest/1')
        assert not os.path.isfile('dest/a/b/2')
        assert not os.path.isfile('dest/a/b/3')
        assert not os.path.isfile('dest/c/4')
        assert not os.path.isfile('dest/c/d/5')
        assert not os.path.isfile('dest/c/d/6')
        assert not os.path.isfile('dest/c/d/e/7')
Example #18
File: patch.py Project: LLNL/spack
def test_patched_dependency(
        mock_packages, config, install_mockery, mock_fetch):
    """Test whether patched dependencies work."""
    spec = Spec('patch-a-dependency')
    spec.concretize()
    assert 'patches' in list(spec['libelf'].variants.keys())

    # make sure the patch makes it into the dependency spec
    assert (('c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8',) ==
            spec['libelf'].variants['patches'].value)

    # make sure the patch in the dependent's directory is applied to the
    # dependency
    libelf = spec['libelf']
    pkg = libelf.package
    pkg.do_patch()
    with pkg.stage:
        with working_dir(pkg.stage.source_path):
            # output a Makefile with 'echo Patched!' as the default target
            configure = Executable('./configure')
            configure()

            # Make sure the Makefile contains the patched text
            with open('Makefile') as mf:
                assert 'Patched!' in mf.read()
Example #19
    def configure(self, spec, prefix):
        """Runs configure with the arguments specified in ``configure_args``
        and an appropriately set prefix
        """
        options = ['--prefix={0}'.format(prefix)] + self.configure_args()

        with working_dir(self.build_directory()):
            inspect.getmodule(self).configure(*options)
Example #20
 def reset(self):
     with working_dir(self.stage.source_path):
         if spack.config.get('config:debug'):
             self.git('checkout', '.')
             self.git('clean', '-f')
         else:
             self.git('checkout', '--quiet', '.')
             self.git('clean', '--quiet', '-f')
Example #21
    def test_symlinks_false(self, stage):
        """Test installing without symlink preservation."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest', symlinks=False)

            assert os.path.exists('dest/2')
            assert not os.path.islink('dest/2')
Example #22
    def test_dir_dest(self, stage):
        """Test using a directory as the destination."""

        with fs.working_dir(str(stage)):
            fs.install('source/1', 'dest')

            assert os.path.exists('dest/1')
            check_added_exe_permissions('source/1', 'dest/1')
Example #23
 def test_symlinks_true_ignore(self, stage):
     """Test copying when specifying relative paths that should be ignored
     """
     with fs.working_dir(str(stage)):
         ignore = lambda p: p in ['c/d/e', 'a']
         fs.copy_tree('source', 'dest', symlinks=True, ignore=ignore)
         assert not os.path.exists('dest/a')
         assert os.path.exists('dest/c/d')
         assert not os.path.exists('dest/c/d/e')
Example #24
    def configure(self, spec, prefix):
        """Runs configure with the arguments specified in
        :py:meth:`~.AutotoolsPackage.configure_args`
        and an appropriately set prefix.
        """
        options = ['--prefix={0}'.format(prefix)] + self.configure_args()

        with working_dir(self.build_directory, create=True):
            inspect.getmodule(self).configure(*options)
Example #25
File: pkg.py Project: LLNL/spack
def list_packages(rev):
    pkgpath = os.path.join(spack.paths.packages_path, 'packages')
    relpath = pkgpath[len(spack.paths.prefix + os.path.sep):] + os.path.sep

    git = which('git', required=True)
    with working_dir(spack.paths.prefix):
        output = git('ls-tree', '--full-tree', '--name-only', rev, relpath,
                     output=str)
    return sorted(line[len(relpath):] for line in output.split('\n') if line)
Example #26
File: pkg.py Project: LLNL/spack
def pkg_add(args):
    for pkg_name in args.packages:
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        if not os.path.isfile(filename):
            tty.die("No such package: %s.  Path does not exist:" %
                    pkg_name, filename)

        git = which('git', required=True)
        with working_dir(spack.paths.prefix):
            git('-C', spack.paths.packages_path, 'add', filename)
Example #27
    def fetch(self):
        tty.msg("Getting go resource:", self.url)

        with working_dir(self.stage.path):
            try:
                os.mkdir('go')
            except OSError:
                pass
            env = dict(os.environ)
            env['GOPATH'] = os.path.join(os.getcwd(), 'go')
            self.go('get', '-v', '-d', self.url, env=env)
Example #28
    def import_module_test(self):
        """Attempts to import the module that was just installed.

        This test is only run if the package overrides
        :py:attr:`import_modules` with a list of module names."""

        # Make sure we are importing the installed modules,
        # not the ones in the current directory
        with working_dir('..'):
            for module in self.import_modules:
                self.python('-c', 'import {0}'.format(module))
Example #29
File: cmake.py Project: LLNL/spack
 def check(self):
     """Searches the CMake-generated Makefile for the target ``test``
     and runs it if found.
     """
     with working_dir(self.build_directory):
         if self.generator == 'Unix Makefiles':
             self._if_make_target_execute('test',
                                          jobs_env='CTEST_PARALLEL_LEVEL')
             self._if_make_target_execute('check')
         elif self.generator == 'Ninja':
             self._if_ninja_target_execute('test',
                                           jobs_env='CTEST_PARALLEL_LEVEL')
             self._if_ninja_target_execute('check')
Example #30
    def reset(self):
        with working_dir(self.stage.path):
            source_path = self.stage.source_path
            scrubbed = "scrubbed-source-tmp"

            args = ['clone']
            if self.revision:
                args += ['-r', self.revision]
            args += [source_path, scrubbed]
            self.hg(*args)

            shutil.rmtree(source_path, ignore_errors=True)
            shutil.move(scrubbed, source_path)
Example #31
 def autoreconf(self, spec, prefix):
     """Not needed usually, configure should be already there"""
     # If configure exists nothing needs to be done
     if os.path.exists(self.configure_abs_path):
         return
     # Else try to regenerate it
     autotools = ['m4', 'autoconf', 'automake', 'libtool']
     missing = [x for x in autotools if x not in spec]
     if missing:
         msg = 'Cannot generate configure: missing dependencies {0}'
         raise RuntimeError(msg.format(missing))
     tty.msg('Configure script not found: trying to generate it')
     tty.warn('*********************************************************')
     tty.warn('* If the default procedure fails, consider implementing *')
     tty.warn('*        a custom AUTORECONF phase in the package       *')
     tty.warn('*********************************************************')
     with working_dir(self.configure_directory):
         m = inspect.getmodule(self)
         # This line is what is needed most of the time
         # --install, --verbose, --force
         autoreconf_args = ['-ivf']
         autoreconf_args += self.autoreconf_search_path_args
         autoreconf_args += self.autoreconf_extra_args
         m.autoreconf(*autoreconf_args)
Example #32
def test_gitsubmodules_delete(mock_git_repository, config, mutable_mock_repo,
                              monkeypatch):
    """
    Test GitFetchStrategy behavior with submodules_delete
    """
    type_of_test = 'tag-branch'
    t = mock_git_repository.checks[type_of_test]

    # Construct the package under test
    s = Spec('git-test').concretized()
    args = copy.copy(t.args)
    args['submodules'] = True
    args['submodules_delete'] = [
        'third_party/submodule0', 'third_party/submodule1'
    ]
    monkeypatch.setitem(s.package.versions, ver('git'), args)
    s.package.do_stage()
    with working_dir(s.package.stage.source_path):
        file_path = os.path.join(s.package.stage.source_path,
                                 'third_party/submodule0')
        assert not os.path.isdir(file_path)
        file_path = os.path.join(s.package.stage.source_path,
                                 'third_party/submodule1')
        assert not os.path.isdir(file_path)
Example #33
def test_gitsubmodules_delete(mock_git_repository, config, mutable_mock_repo):
    """
    Test GitFetchStrategy behavior with submodules_delete
    """
    type_of_test = 'tag-branch'
    t = mock_git_repository.checks[type_of_test]

    # Construct the package under test
    spec = Spec('git-test')
    spec.concretize()
    pkg = spack.repo.get(spec)
    args = copy.copy(t.args)
    args['submodules'] = True
    args['submodules_delete'] = ['third_party/submodule0',
                                 'third_party/submodule1']
    pkg.versions[ver('git')] = args
    pkg.do_stage()
    with working_dir(pkg.stage.source_path):
        file_path = os.path.join(pkg.stage.source_path,
                                 'third_party/submodule0')
        assert not os.path.isdir(file_path)
        file_path = os.path.join(pkg.stage.source_path,
                                 'third_party/submodule1')
        assert not os.path.isdir(file_path)
Example #34
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, string_types):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        with working_dir(self.stage.path):
            if self.stage.srcdir:
                # Here we create an archive with the default repository name.
                # The 'tar' command has options for changing the name of a
                # directory that is included in the archive, but they differ
                # based on OS, so we temporarily rename the repo
                with temp_rename(self.stage.source_path, self.stage.srcdir):
                    tar('-czf', destination, self.stage.srcdir)
            else:
                tar('-czf', destination,
                    os.path.basename(self.stage.source_path))
Example #35
def test(parser, args, unknown_args):
    if args.pytest_help:
        # make the pytest.main help output more accurate
        sys.argv[0] = 'spack test'
        return pytest.main(['-h'])

    # add back any parsed pytest args we need to pass to pytest
    pytest_args = add_back_pytest_args(args, unknown_args)

    # The default is to test the core of Spack. If the option `--extension`
    # has been used, then test that extension.
    pytest_root = spack.paths.test_path
    if args.extension:
        target = args.extension
        extensions = spack.config.get('config:extensions')
        pytest_root = spack.extensions.path_for_extension(target, *extensions)

    # pytest.ini lives in the root of the spack repository.
    with working_dir(pytest_root):
        if args.list:
            do_list(args, pytest_args)
            return

        return pytest.main(pytest_args)
Example #36
def test_fetch(mock_archive, secure, checksum_type, config,
               refresh_builtin_mock):
    """Fetch an archive and make sure we can checksum it."""
    mock_archive.url
    mock_archive.path

    algo = crypto.hashes[checksum_type]()
    with open(mock_archive.archive_file, 'rb') as f:
        algo.update(f.read())
    checksum = algo.hexdigest()

    # Get a spec and tweak the test package with new checksum params
    spec = Spec('url-test')
    spec.concretize()

    pkg = spack.repo.get('url-test', new=True)
    pkg.url = mock_archive.url
    pkg.versions[ver('test')] = {checksum_type: checksum, 'url': pkg.url}
    pkg.spec = spec

    # Enter the stage directory and check some properties
    with pkg.stage:
        try:
            spack.insecure = secure
            pkg.do_stage()
        finally:
            spack.insecure = False

        with working_dir(pkg.stage.source_path):
            assert os.path.exists('configure')
            assert is_exe('configure')

            with open('configure') as f:
                contents = f.read()
            assert contents.startswith('#!/bin/sh')
            assert 'echo Building...' in contents
Example #37
def create_db_tarball(args):
    tar = which('tar')
    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
    tarball_path = os.path.abspath(tarball_name)

    base = os.path.basename(spack.store.root)
    transform_args = []
    if 'GNU' in tar('--version', output=str):
        transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
    else:
        transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]

    wd = os.path.dirname(spack.store.root)
    with working_dir(wd):
        files = [spack.store.db._index_path]
        files += glob('%s/*/*/*/.spack/spec.yaml' % base)
        files = [os.path.relpath(f) for f in files]

        args = ['-czf', tarball_path]
        args += transform_args
        args += files
        tar(*args)

    tty.msg('Created %s' % tarball_name)
Example #38
    def fetch(self):
        if self.archive_file:
            tty.msg("Already downloaded %s" % self.archive_file)
            return

        save_file = None
        partial_file = None
        if self.stage.save_filename:
            save_file = self.stage.save_filename
            partial_file = self.stage.save_filename + '.part'

        tty.msg("Fetching %s" % self.url)

        if partial_file:
            save_args = [
                '-C',
                '-',  # continue partial downloads
                '-o',
                partial_file
            ]  # use a .part file
        else:
            save_args = ['-O']

        curl_args = save_args + [
            '-f',  # fail on >400 errors
            '-D',
            '-',  # print out HTML headers
            '-L',  # resolve 3xx redirects
            self.url,
        ]

        if not spack.config.get('config:verify_ssl'):
            curl_args.append('-k')

        if sys.stdout.isatty():
            curl_args.append('-#')  # status bar when using a tty
        else:
            curl_args.append('-sS')  # just errors when not.

        curl_args += self.extra_curl_options

        # Run curl but grab the mime type from the http headers
        curl = self.curl
        with working_dir(self.stage.path):
            headers = curl(*curl_args, output=str, fail_on_error=False)

        if curl.returncode != 0:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)

            if partial_file and os.path.exists(partial_file):
                os.remove(partial_file)

            if curl.returncode == 22:
                # This is a 404.  Curl will print the error.
                raise FailedDownloadError(self.url,
                                          "URL %s was not found!" % self.url)

            elif curl.returncode == 60:
                # This is a certificate error.  Suggest spack -k
                raise FailedDownloadError(
                    self.url,
                    "Curl was unable to fetch due to invalid certificate. "
                    "This is either an attack, or your cluster's SSL "
                    "configuration is bad.  If you believe your SSL "
                    "configuration is bad, you can try running spack -k, "
                    "which will not check SSL certificates."
                    "Use this at your own risk.")

            else:
                # This is some other curl error.  Curl will print the
                # error, but print a spack message too
                raise FailedDownloadError(
                    self.url, "Curl failed with error %d" % curl.returncode)

        # Check if we somehow got an HTML file rather than the archive we
        # asked for.  We only look at the last content type, to handle
        # redirects properly.
        content_types = re.findall(r'Content-Type:[^\r\n]+',
                                   headers,
                                   flags=re.IGNORECASE)
        if content_types and 'text/html' in content_types[-1]:
            msg = ("The contents of {0} look like HTML. Either the URL "
                   "you are trying to use does not exist or you have an "
                   "internet gateway issue. You can remove the bad archive "
                   "using 'spack clean <package>', then try again using "
                   "the correct URL.")
            tty.warn(msg.format(self.archive_file or "the archive"))

        if save_file:
            os.rename(partial_file, save_file)

        if not self.archive_file:
            raise FailedDownloadError(self.url)
Example #39
    def waf(self, *args, **kwargs):
        """Runs the waf ``Executable``."""
        jobs = inspect.getmodule(self).make_jobs

        with working_dir(self.build_directory):
            self.python('waf', '-j{0}'.format(jobs), *args, **kwargs)
Example #40
 def check(self):
     """Default test : search the Makefile for the target `test`
     and run them if found.
     """
     with working_dir(self.build_directory()):
         self._if_make_target_execute('test')
Example #41
 def install(self, spec, prefix):
     """...and the final `make install` after cmake"""
     with working_dir(self.build_directory()):
         inspect.getmodule(self).make('install')
Example #42
 def build(self, spec, prefix):
     """The usual `make` after cmake"""
     with working_dir(self.build_directory()):
         inspect.getmodule(self).make()
Example #43
 def install(self, spec, prefix):
     """Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
     as targets.
     """
     with working_dir(self.build_directory):
         inspect.getmodule(self).make(*self.install_targets)
Example #44
    def lookup_commit(self, commit):
        """Lookup the previous version and distance for a given commit.

        We use git to compare the known versions from package to the git tags,
        as well as any git tags that are SEMVER versions, and find the latest
        known version prior to the commit, as well as the distance from that version
        to the commit in the git repo. Those values are used to compare Version objects.
        """
        dest = os.path.join(spack.paths.user_repos_cache_path,
                            self.repository_uri)
        if dest.endswith('.git'):
            dest = dest[:-4]

        # prepare a cache for the repository
        dest_parent = os.path.dirname(dest)
        if not os.path.exists(dest_parent):
            mkdirp(dest_parent)

        # Only clone if we don't have it!
        if not os.path.exists(dest):
            self.fetcher.clone(dest, bare=True)

        # Lookup commit info
        with working_dir(dest):
            self.fetcher.git("fetch", '--tags')

            # Ensure commit is an object known to git
            # Note the brackets are literals, the commit replaces the format string
            # This will raise a ProcessError if the commit does not exist
            # We may later design a custom error to re-raise
            self.fetcher.git('cat-file', '-e', '%s^{commit}' % commit)

            # List tags (refs) by date, so last reference of a tag is newest
            tag_info = self.fetcher.git("for-each-ref",
                                        "--sort=creatordate",
                                        "--format",
                                        "%(objectname) %(refname)",
                                        "refs/tags",
                                        output=str).split('\n')

            # Lookup of commits to spack versions
            commit_to_version = {}

            for entry in tag_info:
                if not entry:
                    continue
                tag_commit, tag = entry.split()
                tag = tag.replace('refs/tags/', '', 1)

                # For each tag, try to match to a version
                for v in [v.string for v in self.pkg.versions]:
                    if v == tag or 'v' + v == tag:
                        commit_to_version[tag_commit] = v
                        break
                else:
                    # try to parse tag to compare versions spack does not know
                    match = SEMVER_REGEX.match(tag)
                    if match:
                        semver = match.groupdict()['semver']
                        commit_to_version[tag_commit] = semver

            ancestor_commits = []
            for tag_commit in commit_to_version:
                self.fetcher.git('merge-base',
                                 '--is-ancestor',
                                 tag_commit,
                                 commit,
                                 ignore_errors=[1])
                if self.fetcher.git.returncode == 0:
                    distance = self.fetcher.git('rev-list',
                                                '%s..%s' %
                                                (tag_commit, commit),
                                                '--count',
                                                output=str,
                                                error=str).strip()
                    ancestor_commits.append((tag_commit, int(distance)))

            # Get nearest ancestor that is a known version
            ancestor_commits.sort(key=lambda x: x[1])
            if ancestor_commits:
                prev_version_commit, distance = ancestor_commits[0]
                prev_version = commit_to_version[prev_version_commit]
            else:
                # Get list of all commits, this is in reverse order
                # We use this to get the first commit below
                commit_info = self.fetcher.git("log",
                                               "--all",
                                               "--pretty=format:%H",
                                               output=str)
                commits = [c for c in commit_info.split('\n') if c]

                # No previous version and distance from first commit
                prev_version = None
                distance = int(
                    self.fetcher.git('rev-list',
                                     '%s..%s' % (commits[-1], commit),
                                     '--count',
                                     output=str,
                                     error=str).strip())

        return prev_version, distance
Example #45
File: cli.py Project: wangvsa/spack
def test_command(default_config, container_config_dir, capsys):
    with capsys.disabled():
        with fs.working_dir(container_config_dir):
            output = containerize()
    assert 'FROM spack/ubuntu-bionic' in output
Example #46
    def install(self, spec, prefix):
        """Make the install targets"""

        with working_dir(self.build_directory):
            inspect.getmodule(self).make('install')
Example #47
    def qmake(self, spec, prefix):
        """Run ``qmake`` to configure the project and generate a Makefile."""

        with working_dir(self.build_directory):
            inspect.getmodule(self).qmake(*self.qmake_args())
Example #48
    def test_non_existing_src(self, stage):
        """Test using a non-existing source."""

        with fs.working_dir(str(stage)):
            with pytest.raises(IOError, match='No such file or directory'):
                fs.copy('source/none', 'dest')
Example #49
 def reset(self):
     self._remove_untracked_files()
     with working_dir(self.stage.source_path):
         self.svn('revert', '.', '-R')
Example #50
 def reset(self):
     with working_dir(self.stage.source_path):
         self.go('clean')
Example #51
    def fetch(self):
        if self.stage.source_path:
            tty.msg("Already fetched %s" % self.stage.source_path)
            return

        args = ''
        if self.commit:
            args = 'at commit %s' % self.commit
        elif self.tag:
            args = 'at tag %s' % self.tag
        elif self.branch:
            args = 'on branch %s' % self.branch

        tty.msg("Cloning git repository: %s %s" % (self.url, args))

        git = self.git
        if self.commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            with working_dir(self.stage.path):
                if spack.config.get('config:debug'):
                    git('clone', self.url)
                else:
                    git('clone', '--quiet', self.url)

            with working_dir(self.stage.source_path):
                if spack.config.get('config:debug'):
                    git('checkout', self.commit)
                else:
                    git('checkout', '--quiet', self.commit)

        else:
            # Can be more efficient if not checking out a specific commit.
            args = ['clone']
            if not spack.config.get('config:debug'):
                args.append('--quiet')

            # If we want a particular branch ask for it.
            if self.branch:
                args.extend(['--branch', self.branch])
            elif self.tag and self.git_version >= ver('1.8.5.2'):
                args.extend(['--branch', self.tag])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
            if self.git_version > ver('1.7.10'):
                args.append('--single-branch')

            with working_dir(self.stage.path):
                cloned = False
                # Yet more efficiency, only download a 1-commit deep tree
                if self.git_version >= ver('1.7.1'):
                    try:
                        git(*(args + ['--depth', '1', self.url]))
                        cloned = True
                    except spack.error.SpackError:
                        # This will fail with the dumb HTTP transport
                        # continue and try without depth, cleanup first
                        pass

                if not cloned:
                    args.append(self.url)
                    git(*args)

                with working_dir(self.stage.source_path):
                    # For tags, be conservative and check them out AFTER
                    # cloning.  Later git versions can do this with clone
                    # --branch, but older ones fail.
                    if self.tag and self.git_version < ver('1.8.5.2'):
                        # pull --tags returns a "special" error code of 1 in
                        # older versions that we have to ignore.
                        # see: https://github.com/git/git/commit/19d122b
                        if spack.config.get('config:debug'):
                            git('pull', '--tags', ignore_errors=1)
                            git('checkout', self.tag)
                        else:
                            git('pull', '--quiet', '--tags', ignore_errors=1)
                            git('checkout', '--quiet', self.tag)

        with working_dir(self.stage.source_path):
            # Init submodules if the user asked for them.
            if self.submodules:
                if spack.config.get('config:debug'):
                    git('submodule', 'update', '--init', '--recursive')
                else:
                    git('submodule', '--quiet', 'update', '--init',
                        '--recursive')
Example #52
File: maven.py Project: eic/spack
    def install(self, spec, prefix):
        """Copy to installation prefix."""

        with working_dir(self.build_directory):
            install_tree('.', prefix)
Example #53
    def setup_py(self, *args, **kwargs):
        setup = self.setup_file()

        with working_dir(self.build_directory):
            self.python('-s', setup, '--no-user-cfg', *args, **kwargs)
Example #54
    def expand(self):
        if not self.expand_archive:
            tty.msg("Staging unexpanded archive %s in %s" %
                    (self.archive_file, self.stage.source_path))
            if not self.stage.expanded:
                mkdirp(self.stage.source_path)
            dest = os.path.join(self.stage.source_path,
                                os.path.basename(self.archive_file))
            # if the archive is a symlink itself, copy the target because
            # otherwise the symlink target might get modified by
            # staging-operations
            if os.path.islink(self.archive_file):
                shutil.copy(self.archive_file, dest)
            else:
                shutil.move(self.archive_file, dest)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

        if not self.archive_file:
            raise NoArchiveFileError(
                "Couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
            self.extension = extension(self.archive_file)

        if self.stage.expanded:
            tty.debug('Source already staged to %s' % self.stage.source_path)
            return

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")

        mkdirp(tarball_container)
        with working_dir(tarball_container):
            decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand to
        # a single directory.  If the tarball *didn't* explode, move its
        # contents to the staging source directory & remove the container
        # directory.  If the tarball did explode, just rename the tarball
        # directory to the staging source directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata in
        # hidden files, which can end up *alongside* a single top-level
        # directory.  We initially ignore presence of hidden files to
        # accommodate these "semi-exploding" tarballs but ensure the files
        # are copied to the source directory.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            src = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(src):
                self.stage.srcdir = non_hidden[0]
                shutil.move(src, self.stage.source_path)
                if len(files) > 1:
                    files.remove(non_hidden[0])
                    for f in files:
                        src = os.path.join(tarball_container, f)
                        dest = os.path.join(self.stage.path, f)
                        shutil.move(src, dest)
                os.rmdir(tarball_container)
            else:
                # This is a non-directory entry (e.g., a patch file) so simply
                # rename the tarball container to be the source path.
                shutil.move(tarball_container, self.stage.source_path)

        else:
            shutil.move(tarball_container, self.stage.source_path)
Example #55
 def install(self, spec, prefix):
     """Makes the install targets specified by
     :py:attr:`~.AutotoolsPackage.install_targets`
     """
     with working_dir(self.build_directory):
         inspect.getmodule(self).make(*self.install_targets)
Example #56
    def fetch(self):
        if self.stage.expanded:
            tty.msg("Already fetched {0}".format(self.stage.source_path))
            return

        tty.msg("Cloning git repository: {0}".format(self._repo_info()))

        git = self.git
        if self.commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            debug = spack.config.get('config:debug')

            clone_args = ['clone', self.url]
            if not debug:
                clone_args.insert(1, '--quiet')
            with temp_cwd():
                git(*clone_args)
                repo_name = get_single_file('.')
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(self.stage.source_path):
                checkout_args = ['checkout', self.commit]
                if not debug:
                    checkout_args.insert(1, '--quiet')
                git(*checkout_args)

        else:
            # Can be more efficient if not checking out a specific commit.
            args = ['clone']
            if not spack.config.get('config:debug'):
                args.append('--quiet')

            # If we want a particular branch ask for it.
            if self.branch:
                args.extend(['--branch', self.branch])
            elif self.tag and self.git_version >= ver('1.8.5.2'):
                args.extend(['--branch', self.tag])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
            if self.git_version >= ver('1.7.10'):
                if self.get_full_repo:
                    args.append('--no-single-branch')
                else:
                    args.append('--single-branch')

            with temp_cwd():
                # Yet more efficiency: only download a 1-commit deep
                # tree, if the in-use git and protocol permit it.
                if (not self.get_full_repo) and \
                   self.git_version >= ver('1.7.1') and \
                   self.protocol_supports_shallow_clone():
                    args.extend(['--depth', '1'])

                args.extend([self.url])
                git(*args)

                repo_name = get_single_file('.')
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(self.stage.source_path):
                # For tags, be conservative and check them out AFTER
                # cloning.  Later git versions can do this with clone
                # --branch, but older ones fail.
                if self.tag and self.git_version < ver('1.8.5.2'):
                    # pull --tags returns a "special" error code of 1 in
                    # older versions that we have to ignore.
                    # see: https://github.com/git/git/commit/19d122b
                    pull_args = ['pull', '--tags']
                    co_args = ['checkout', self.tag]
                    if not spack.config.get('config:debug'):
                        pull_args.insert(1, '--quiet')
                        co_args.insert(1, '--quiet')

                    git(*pull_args, ignore_errors=1)
                    git(*co_args)

        # Init submodules if the user asked for them.
        if self.submodules:
            with working_dir(self.stage.source_path):
                args = ['submodule', 'update', '--init', '--recursive']
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
                git(*args)
Example #57
    def build(self, spec, prefix):
        """Make the build targets"""

        with working_dir(self.build_directory):
            inspect.getmodule(self).make()
Example #58
 def build(self, spec, prefix):
     """Make the build targets"""
     options = ['-v']
     options += self.build_targets
     with working_dir(self.build_directory):
         inspect.getmodule(self).ninja(*options)
Example #59
    def check(self):
        """Searches the Makefile for a ``check:`` target and runs it if found.
        """

        with working_dir(self.build_directory):
            self._if_make_target_execute('check')
Example #60
def flake8(parser, args):
    flake8 = which('flake8', required=True)

    temp = tempfile.mkdtemp()
    try:
        file_list = args.files
        if file_list:

            def prefix_relative(path):
                return os.path.relpath(os.path.abspath(os.path.realpath(path)),
                                       spack.paths.prefix)

            file_list = [prefix_relative(p) for p in file_list]

        with working_dir(spack.paths.prefix):
            if not file_list:
                file_list = changed_files(args)

        print('=======================================================')
        print('flake8: running flake8 code checks on spack.')
        print()
        print('Modified files:')
        for filename in file_list:
            print('  {0}'.format(filename.strip()))
        print('=======================================================')

        # filter files into a temporary directory with exemptions added.
        for filename in file_list:
            src_path = os.path.join(spack.paths.prefix, filename)
            dest_path = os.path.join(temp, filename)
            filter_file(src_path, dest_path, args.output)

        # run flake8 on the temporary tree, once for core, once for pkgs
        package_file_list = [f for f in file_list if is_package(f)]
        file_list = [f for f in file_list if not is_package(f)]

        returncode = 0
        with working_dir(temp):
            output = ''
            if file_list:
                output += flake8('--format',
                                 'pylint',
                                 '--config=%s' %
                                 os.path.join(spack.paths.prefix, '.flake8'),
                                 *file_list,
                                 fail_on_error=False,
                                 output=str)
                returncode |= flake8.returncode
            if package_file_list:
                output += flake8(
                    '--format',
                    'pylint',
                    '--config=%s' %
                    os.path.join(spack.paths.prefix, '.flake8_packages'),
                    *package_file_list,
                    fail_on_error=False,
                    output=str)
                returncode |= flake8.returncode

        if args.root_relative:
            # print results relative to repo root.
            print(output)
        else:
            # print results relative to current working directory
            def cwd_relative(path):
                return '{0}: ['.format(
                    os.path.relpath(
                        os.path.join(spack.paths.prefix, path.group(1)),
                        os.getcwd()))

            for line in output.split('\n'):
                print(re.sub(r'^(.*): \[', cwd_relative, line))

        if returncode != 0:
            print('Flake8 found errors.')
            sys.exit(1)
        else:
            print('Flake8 checks were clean.')

    finally:
        if args.keep_temp:
            print('Temporary files are in: ', temp)
        else:
            shutil.rmtree(temp, ignore_errors=True)