Пример #1
0
    def test_setup_and_destroy_name_with_tmp(self):
        """Named stage with tmp enabled: verify setup, then teardown."""
        with use_tmp(True):
            named_stage = Stage(archive_url, name=stage_name)
            self.check_setup(named_stage, stage_name)
            named_stage.destroy()
            self.check_destroy(named_stage, stage_name)
Пример #2
0
    def test_setup_and_destroy_no_name_without_tmp(self):
        """Anonymous stage with tmp disabled: verify setup, then teardown."""
        with use_tmp(False):
            anon_stage = Stage(archive_url)
            self.check_setup(anon_stage, None)
            anon_stage.destroy()
            self.check_destroy(anon_stage, None)
Пример #3
0
class InstallTest(MockPackagesTest):
    """Tests install and uninstall on a trivial package."""

    def setUp(self):
        """Create a stage holding a fake tarball whose configure script
        writes a trivial Makefile."""
        super(InstallTest, self).setUp()

        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        # 0o755 is valid on Python 2.6+ and Python 3; the old 0755
        # literal is a syntax error on Python 3.
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False

    def tearDown(self):
        """Destroy the stage and restore checksumming."""
        super(InstallTest, self).tearDown()

        if self.stage is not None:
            self.stage.destroy()

        # Turn checksumming back on
        spack.do_checksum = True

    def test_install_and_uninstall(self):
        """Install then uninstall the trivial package from a file:// URL."""
        # Get a basic concrete spec for the trivial install package.
        spec = Spec(install_test_package)
        spec.concretize()
        self.assertTrue(spec.concrete)

        # Get the package
        pkg = spack.db.get(spec)

        # Fake the URL for the package so it downloads from a file.
        archive_path = join_path(self.stage.path, archive_name)
        pkg.url = 'file://' + archive_path

        try:
            pkg.do_install()
            pkg.do_uninstall()
        except Exception:
            # Was Python-2-only 'except Exception, e'; the bound name
            # was unused, so just clean up and re-raise.
            pkg.remove_prefix()
            raise
Пример #4
0
    def checkSetupAndDestroy(self, stage_name=None):
        """Set up a stage, verify it, destroy it, and verify destruction."""
        s = Stage(archive_url, name=stage_name)
        s.setup()
        self.check_setup(s, stage_name)

        s.destroy()
        self.check_destroy(s, stage_name)
Пример #5
0
    def test_chdir(self):
        """chdir() alone should create the stage directory and enter it."""
        s = Stage(archive_url, name=stage_name)

        s.chdir()
        self.check_setup(s, stage_name)
        self.check_chdir(s, stage_name)

        s.destroy()
        self.check_destroy(s, stage_name)
Пример #6
0
class MockRepo(object):
    """A stage-backed directory where archive/repo files can be staged
       for testing spack's fetch strategies."""

    def __init__(self, stage_name, repo_name):
        # Stage that backs this mock repo.
        repo_stage = Stage(stage_name)
        self.stage = repo_stage

        # Full path to the repo inside the stage; created eagerly.
        self.path = join_path(repo_stage.path, repo_name)
        mkdirp(self.path)

    def destroy(self):
        """Destroy resources associated with this mock repo."""
        if self.stage:
            self.stage.destroy()
Пример #7
0
    def setUp(self):
        """Create a stage containing a tarball of a fake package whose
        configure script generates a trivial Makefile."""
        super(InstallTest, self).setUp()

        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        # 0o755 is valid on Python 2.6+ and 3; the bare 0755 literal is a
        # syntax error on Python 3.
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False
Пример #8
0
    def setUp(self):
        """Create a stage with a fake tarball and point spack at a
        temporary install layout."""
        super(InstallTest, self).setUp()

        self.stage = Stage('not_a_real_url')
        archive_dir = join_path(self.stage.path, dir_name)
        dummy_configure = join_path(archive_dir, 'configure')

        mkdirp(archive_dir)
        with closing(open(dummy_configure, 'w')) as configure:
            configure.write(
                "#!/bin/sh\n"
                "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                "cat > Makefile <<EOF\n"
                "all:\n"
                "\techo Building...\n\n"
                "install:\n"
                "\tmkdir -p $prefix\n"
                "\ttouch $prefix/dummy_file\n"
                "EOF\n")
        # 0o755 is valid on Python 2.6+ and 3; the bare 0755 literal is a
        # syntax error on Python 3.
        os.chmod(dummy_configure, 0o755)

        with working_dir(self.stage.path):
            tar = which('tar')
            tar('-czf', archive_name, dir_name)

        # We use a fake package, so skip the checksum.
        spack.do_checksum = False

        # Use a fake install directory to avoid conflicts bt/w
        # installed pkgs and mock packages.
        self.tmpdir = tempfile.mkdtemp()
        self.orig_layout = spack.install_layout
        spack.install_layout = SpecHashDirectoryLayout(self.tmpdir)
Пример #9
0
class InstallTest(unittest.TestCase):
    """Tests the configure guesser in spack create"""

    def setUp(self):
        """Work inside a fresh temp dir so archive files don't collide."""
        self.tar = which('tar')
        self.tmpdir = tempfile.mkdtemp()
        self.orig_dir = os.getcwd()
        os.chdir(self.tmpdir)
        self.stage = None

    def tearDown(self):
        """Remove the temp dir, destroy any stage, and restore the cwd."""
        shutil.rmtree(self.tmpdir, ignore_errors=True)
        if self.stage:
            self.stage.destroy()
        os.chdir(self.orig_dir)

    def check_archive(self, filename, system):
        """Archive `filename`, stage it, and expect the guesser to report
        build system `system`."""
        mkdirp('archive')
        touch(join_path('archive', filename))
        self.tar('czf', 'archive.tar.gz', 'archive')

        url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
        # print(url) is valid on Python 2 and 3; the bare 'print url'
        # statement is a Python 3 syntax error.
        print(url)
        self.stage = Stage(url)
        self.stage.fetch()

        guesser = ConfigureGuesser()
        guesser(self.stage)
        self.assertEqual(system, guesser.build_system)

    def test_python(self):
        self.check_archive('setup.py', 'python')

    def test_autotools(self):
        self.check_archive('configure', 'autotools')

    def test_cmake(self):
        self.check_archive('CMakeLists.txt', 'cmake')

    def test_unknown(self):
        self.check_archive('foobar', 'unknown')
Пример #10
0
def stage():
    """Creates a stage with the directory structure for the tests."""
    test_stage = Stage('link-tree-test')
    test_stage.create()

    with working_dir(test_stage.path):
        # Lay out the fixture tree: a few files at varying depths.
        for relpath in ('source/1',
                        'source/a/b/2',
                        'source/a/b/3',
                        'source/c/4',
                        'source/c/d/5',
                        'source/c/d/6',
                        'source/c/d/e/7'):
            touchp(relpath)

    yield test_stage

    test_stage.destroy()
Пример #11
0
    def test_expand_archive(self):
        """Fetch, expand, and chdir into the archive; then clean up."""
        s = Stage(archive_url, name=stage_name)

        s.fetch()
        self.check_setup(s, stage_name)
        self.check_fetch(s, stage_name)

        s.expand_archive()
        s.chdir_to_archive()
        self.check_expand_archive(s, stage_name)
        self.check_chdir_to_archive(s, stage_name)

        s.destroy()
        self.check_destroy(s, stage_name)
Пример #12
0
    def __init__(self, stage_name, repo_name):
        """This creates a stage where some archive/repo files can be staged
           for testing spack's fetch strategies."""
        # Stage that backs this mock repo.
        repo_stage = Stage(stage_name)
        self.stage = repo_stage

        # Full path to the repo inside the stage; create it eagerly.
        self.path = join_path(repo_stage.path, repo_name)
        mkdirp(self.path)
Пример #13
0
def test_url_extra_fetch(tmpdir, mock_archive):
    """Ensure a fetch after downloading is effectively a no-op."""
    fetcher = fs.URLFetchStrategy(mock_archive.url)
    with Stage(fetcher, path=str(tmpdir)) as stage:
        assert fetcher.archive_file is None
        stage.fetch()
        assert fetcher.archive_file is not None
        # Second fetch: archive already present, so this must be a no-op.
        fetcher.fetch()
Пример #14
0
def test_url_extra_fetch(tmpdir, mock_archive, use_curl):
    """Ensure a fetch after downloading is effectively a no-op."""
    with spack.config.override('config:use_curl', use_curl):
        fetcher = fs.URLFetchStrategy(mock_archive.url)
        with Stage(fetcher, path=str(tmpdir)) as stage:
            assert fetcher.archive_file is None
            stage.fetch()
            assert fetcher.archive_file is not None
            # Archive is already downloaded: re-fetch must be a no-op.
            fetcher.fetch()
Пример #15
0
def test_fetch_missing_cache(tmpdir, _fetch_method):
    """Ensure raise a missing cache file."""
    with spack.config.override('config:url_fetch_method', _fetch_method):
        # Windows file URLs don't take a leading slash.
        abs_pref = '' if is_windows else '/'
        missing_fetcher = CacheURLFetchStrategy(
            url='file://' + abs_pref + 'not-a-real-cache-file')
        with Stage(missing_fetcher, path=str(tmpdir)):
            with pytest.raises(NoCacheError, match=r'No cache'):
                missing_fetcher.fetch()
Пример #16
0
def get_keys(install=False, trust=False, force=False, mirrors=None):
    """Get pgp public keys available on mirror with suffix .pub

    Args:
        install: if True, act on each downloaded key.
        trust: if True (with install), add downloaded keys to trusted keys.
        force: if True, re-download keys that already exist locally.
        mirrors: optional mirror collection; defaults to the configured one.
    """
    mirror_collection = (mirrors or spack.mirror.MirrorCollection())

    if not mirror_collection:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    for mirror in mirror_collection.values():
        fetch_url = mirror.fetch_url
        keys_url = url_util.join(fetch_url, _build_cache_relative_path,
                                 _build_cache_keys_relative_path)
        keys_index = url_util.join(keys_url, 'index.json')

        tty.debug('Finding public keys in {0}'.format(
            url_util.format(fetch_url)))

        try:
            _, _, json_file = web_util.read_from_url(keys_index)
            json_index = sjson.load(codecs.getreader('utf-8')(json_file))
        except (URLError, web_util.SpackWebError) as url_err:
            # Only report an error when the index exists but could not be
            # read; a missing index just means this mirror has no keys.
            if web_util.url_exists(keys_index):
                err_msg = [
                    'Unable to find public keys in {0},',
                    ' caught exception attempting to read from {1}.',
                ]

                tty.error(''.join(err_msg).format(url_util.format(fetch_url),
                                                  url_util.format(keys_index)))

                tty.debug(url_err)

            continue

        for fingerprint, key_attributes in json_index['keys'].items():
            link = os.path.join(keys_url, fingerprint + '.pub')

            with Stage(link, name="build_cache", keep=True) as stage:
                if os.path.exists(stage.save_filename) and force:
                    os.remove(stage.save_filename)
                if not os.path.exists(stage.save_filename):
                    try:
                        stage.fetch()
                    except fs.FetchError:
                        continue

            tty.debug('Found key {0}'.format(fingerprint))
            if install:
                if trust:
                    spack.util.gpg.trust(stage.save_filename)
                    tty.debug('Added this key to trusted keys.')
                else:
                    # Fixed: the adjacent literals previously concatenated
                    # without a space ("...trusted keys.Use -t...").
                    tty.debug('Will not add this key to trusted keys. '
                              'Use -t to install all downloaded keys')
Пример #17
0
 def test_no_search_mirror_only(
         self, failing_fetch_strategy, failing_search_fn):
     """A mirror_only fetch that fails must not invoke the search_fn."""
     failing_stage = Stage(failing_fetch_strategy,
                           name=self.stage_name,
                           search_fn=failing_search_fn)
     with failing_stage:
         try:
             failing_stage.fetch(mirror_only=True)
         except spack.fetch_strategy.FetchError:
             pass
     check_destroy(failing_stage, self.stage_name)
Пример #18
0
def download_tarball(spec):
    """
    Download binary tarball for given package into stage area
    Return True if successful
    """
    mirrors = spack.config.get('mirrors')
    if not mirrors:
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")
    tarball = tarball_path_name(spec, '.spack')
    for key in mirrors:
        # stage the tarball into standard place
        tarball_stage = Stage(mirrors[key] + "/build_cache/" + tarball,
                              name="build_cache", keep=True)
        try:
            tarball_stage.fetch()
        except fs.FetchError:
            continue
        return tarball_stage.save_filename
    return None
Пример #19
0
 def test_search_if_default_fails(self, failing_fetch_strategy, search_fn):
     """When the default fetch fails, the stage must fall back to search."""
     failing_stage = Stage(failing_fetch_strategy,
                           name=self.stage_name,
                           search_fn=search_fn)
     with failing_stage:
         try:
             failing_stage.fetch(mirror_only=False)
         except spack.fetch_strategy.FetchError:
             pass
     check_destroy(failing_stage, self.stage_name)
     assert search_fn.performed_search
Пример #20
0
def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror

    Results are memoized in the module-level _cached_specs; pass
    force=True to re-download spec files that already exist locally.
    """
    global _cached_specs

    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.warn("No Spack mirrors are currently configured")
        # NOTE(review): this path returns a dict while the normal path
        # returns a list -- callers that only iterate are unaffected, but
        # the inconsistent return type is worth confirming upstream.
        return {}

    path = str(spack.architecture.sys_type())
    urls = set()
    for mirror_name, mirror_url in mirrors.items():
        if mirror_url.startswith('file'):
            # Local (file://) mirror: list spec.yaml files on disk.
            mirror = mirror_url.replace(
                'file://', '') + "/" + _build_cache_relative_path
            tty.msg("Finding buildcaches in %s" % mirror)
            if os.path.exists(mirror):
                files = os.listdir(mirror)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = 'file://' + mirror + '/' + file
                        urls.add(link)
        else:
            # Remote mirror: spider the build-cache page and keep
            # spec.yaml links matching this machine's architecture.
            tty.msg("Finding buildcaches on %s" % mirror_url)
            p, links = spider(mirror_url + "/" + _build_cache_relative_path)
            for link in links:
                if re.search("spec.yaml", link) and re.search(path, link):
                    urls.add(link)

    _cached_specs = []
    for link in urls:
        # keep=True: the downloaded file persists beyond the with-block so
        # it can serve as a local cache on subsequent calls.
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    # Unfetchable spec file: skip it.
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.append(spec)

    return _cached_specs
Пример #21
0
    def test_keep_exceptions(self, mock_archive):
        """A stage with keep=True must survive an exception in its body."""
        class ThisMustFailHere(Exception):
            pass

        try:
            with Stage(mock_archive.url, name=self.stage_name,
                       keep=True) as stage:
                raise ThisMustFailHere()
        except ThisMustFailHere:
            assert os.path.isdir(get_stage_path(stage, self.stage_name))
def download_tarball(spec):
    """
    Download binary tarball for given package into stage area
    Return True if successful
    """
    mirrors = spack.config.get('mirrors')
    if not mirrors:
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")
    tarball = tarball_path_name(spec, '.spack')
    for mirror_name, mirror_url in mirrors.items():
        # stage the tarball into standard place
        tarball_stage = Stage(
            mirror_url + '/' + _build_cache_relative_path + '/' + tarball,
            name="build_cache", keep=True)
        try:
            tarball_stage.fetch()
        except fs.FetchError:
            continue
        return tarball_stage.save_filename
    return None
Пример #23
0
 def test_noexpand_stage_file(
         self, mock_stage_archive, mock_noexpand_resource):
     """When creating a stage with a nonexpanding URL, the 'archive_file'
     property of the stage should refer to the path of that file.
     """
     noexpand_fetcher = spack.fetch_strategy.from_kwargs(
         url='file://' + mock_noexpand_resource, expand=False)
     with Stage(noexpand_fetcher) as stage:
         stage.fetch()
         stage.expand_archive()
         assert os.path.exists(stage.archive_file)
Пример #24
0
 def test_expand_archive_extra_expand(self, mock_stage_archive):
     """Test expand with an extra expand after expand (i.e., no-op)."""
     archive = mock_stage_archive()
     with Stage(archive.url, name=self.stage_name) as s:
         s.fetch()
         check_setup(s, self.stage_name, archive)
         check_fetch(s, self.stage_name)
         s.expand_archive()
         # Expanding a second time must be harmless.
         s.fetcher.expand()
         check_expand_archive(s, self.stage_name, [_include_readme])
     check_destroy(s, self.stage_name)
Пример #25
0
def test_urlfetchstrategy_bad_url(tmpdir):
    """Ensure fetch with bad URL fails as expected."""
    with pytest.raises(FailedDownloadError):
        bad_fetcher = URLFetchStrategy(url='file:///does-not-exist')
        assert bad_fetcher is not None

        with Stage(bad_fetcher, path=str(tmpdir)) as stage:
            assert stage is not None
            assert bad_fetcher.archive_file is None
            bad_fetcher.fetch()
Пример #26
0
def test_fetch_options(tmpdir, mock_archive):
    """Fetching with extra curl options (cookie, timeout) must succeed."""
    with spack.config.override('config:url_fetch_method', 'curl'):
        opt_fetcher = fs.URLFetchStrategy(
            url=mock_archive.url,
            fetch_options={'cookie': 'True', 'timeout': 10})
        assert opt_fetcher is not None

        with Stage(opt_fetcher, path=str(tmpdir)) as stage:
            assert stage is not None
            assert opt_fetcher.archive_file is None
            opt_fetcher.fetch()
Пример #27
0
def get_checksums(versions, urls, **kwargs):
    # Allow commands like create() to do some analysis on the first
    # archive after it is downloaded.
    first_stage_function = kwargs.get('first_stage_function', None)

    tty.msg("Downloading...")
    hashes = []
    for i, (url, version) in enumerate(zip(urls, versions)):
        stage = Stage(url)
        try:
            stage.fetch()
            if i == 0 and first_stage_function:
                first_stage_function(stage)

            hashes.append(
                spack.util.crypto.checksum(hashlib.md5, stage.archive_file))
        except FailedDownloadError, e:
            tty.msg("Failed to fetch %s" % url)
            continue

        finally:
Пример #28
0
def test_fetch(tmpdir):
    """Ensure a fetch after expanding is effectively a no-op."""
    testpath = str(tmpdir)
    cache = os.path.join(testpath, 'cache.tar.gz')
    touch(cache)

    cache_fetcher = CacheURLFetchStrategy(url='file:///{0}'.format(cache))
    with Stage(cache_fetcher, path=testpath) as stage:
        # Simulate an already-expanded source directory, then re-fetch.
        mkdirp(stage.source_path)
        cache_fetcher.fetch()
Пример #29
0
def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
    """Ensure fetch with bad URL fails as expected."""
    with spack.config.override('config:url_fetch_method', _fetch_method):
        with pytest.raises(fs.FailedDownloadError):
            bad_fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
            assert bad_fetcher is not None

            with Stage(bad_fetcher, path=str(tmpdir)) as stage:
                assert stage is not None
                assert bad_fetcher.archive_file is None
                bad_fetcher.fetch()
Пример #30
0
def _download_buildcache_entry(mirror_root, descriptions):
    """Fetch each described buildcache file into its target path.

    Returns False as soon as a *required* file fails to download;
    otherwise returns True (optional failures are only logged).
    """
    for description in descriptions:
        entry_url = os.path.join(mirror_root, description['url'])
        target_path = description['path']
        fail_if_missing = description['required']

        mkdirp(target_path)

        entry_stage = Stage(
            entry_url, name="build_cache", path=target_path, keep=True)

        try:
            entry_stage.fetch()
        except fs.FetchError as e:
            tty.debug(e)
            if fail_if_missing:
                tty.error('Failed to download required url {0}'.format(
                    entry_url))
                return False

    return True
Пример #31
0
def download_tarball(spec):
    """
    Download binary tarball for given package into stage area
    Return True if successful
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of pre-compiled packages.")

    tarball = tarball_path_name(spec, '.spack')

    for mirror in spack.mirror.MirrorCollection().values():
        # stage the tarball into standard place
        tarball_stage = Stage(
            url_util.join(mirror.fetch_url, _build_cache_relative_path,
                          tarball),
            name="build_cache", keep=True)
        tarball_stage.create()
        try:
            tarball_stage.fetch()
        except fs.FetchError:
            continue
        return tarball_stage.save_filename

    return None
Пример #32
0
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    with suffix .key or .pub

    Args:
        install: if True, act on each downloaded key.
        trust: if True (with install), add downloaded keys to trusted keys.
        force: if True, re-download keys that already exist locally.
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()

    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(mirror.fetch_url,
                                              _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            # Local (file://) mirror: list key files directly on disk.
            tty.msg("Finding public keys in %s" % mirror_dir)
            files = os.listdir(str(mirror_dir))
            for file in files:
                if re.search(r'\.key', file) or re.search(r'\.pub', file):
                    link = url_util.join(fetch_url_build_cache, file)
                    keys.add(link)
        else:
            tty.msg("Finding public keys at %s" %
                    url_util.format(fetch_url_build_cache))
            # For s3 mirror need to request index.html directly
            p, links = web_util.spider(url_util.join(fetch_url_build_cache,
                                                     'index.html'),
                                       depth=1)

            for link in links:
                if re.search(r'\.key', link) or re.search(r'\.pub', link):
                    keys.add(link)

        # NOTE(review): this loop is nested inside the mirror loop while
        # `keys` accumulates across mirrors, so keys found at earlier
        # mirrors get re-processed once per subsequent mirror -- confirm
        # whether this was meant to be dedented out of the mirror loop.
        for link in keys:
            with Stage(link, name="build_cache", keep=True) as stage:
                if os.path.exists(stage.save_filename) and force:
                    os.remove(stage.save_filename)
                if not os.path.exists(stage.save_filename):
                    try:
                        stage.fetch()
                    except fs.FetchError:
                        continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    # NOTE(review): these adjacent string literals join
                    # with no separating space ("...keys.Use -t...").
                    tty.msg('Will not add this key to trusted keys.'
                            'Use -t to install all downloaded keys')
Пример #33
0
    def install(self, spec, prefix):
        """Build sra-tools after fetching and building its ngs and
        ncbi-vdb dependencies from their own GitHub tarball stages."""

        build_dir = join_path(self.stage.source_path, "builddir")
        os.mkdir(build_dir)

        # Dependency 1: NGS SDK, fetched and built in a separate stage.
        stage1 = Stage("https://github.com/ncbi/ngs/archive/1.1.3.tar.gz") # Download tarball. fetch() doesn't work with git url?
        stage1.fetch()
        stage1.expand_archive()

        with working_dir(stage1.source_path):
            ngs_build = join_path(stage1.source_path,"builddir")
            os.mkdir(ngs_build)
            # First pass: build the sdk plus python/java bindings.
            configure("--build-prefix="+ ngs_build, "--prefix=" + prefix)
            make("ngs-sdk")
            make("ngs-python")
            make("ngs-java")
            # Second pass: reconfigure against the sdk to build ngs-bam.
            configure("--build-prefix=" + ngs_build, "--with-ngs-sdk=" + prefix, "--prefix=" + prefix)
            make("ngs-bam")
            make("install")


        # Dependency 2: NCBI-VDB, built against the NGS SDK installed above.
        stage2 = Stage("https://github.com/ncbi/ncbi-vdb/archive/2.5.2.tar.gz")
        stage2.fetch()
        stage2.expand_archive()

        with working_dir(stage2.source_path):
            vdb_build = join_path(stage2.source_path,"builddir")
            os.mkdir(vdb_build)
            configure("--build-prefix=" + vdb_build,"--with-ngs-sdk-prefix=" + prefix, "--with-ngs-java-prefix=" + prefix, "--prefix=" + prefix)
            make()
            make("install")

        # sra-tools build block
        with working_dir(self.stage.source_path):
            # Needs both the vdb build tree and its sources, hence the
            # references to stage2 paths here.
            configure("--build-prefix=" + build_dir,
                    "--with-ngs-sdk-prefix=" + prefix, "--with-ncbi-vdb-build=" + vdb_build, "--with-ncbi-vdb-sources=" + stage2.source_path,
                    "--prefix=" + prefix)
            make()
            make("install")
Пример #34
0
def get_checksums(versions, urls, **kwargs):
    # Allow commands like create() to do some analysis on the first
    # archive after it is downloaded.
    first_stage_function = kwargs.get('first_stage_function', None)
    keep_stage = kwargs.get('keep_stage', False)

    tty.msg("Downloading...")
    hashes = []
    for i, (url, version) in enumerate(zip(urls, versions)):
        stage = Stage(url)
        try:
            stage.fetch()
            if i == 0 and first_stage_function:
                first_stage_function(stage)

            hashes.append(
                spack.util.crypto.checksum(hashlib.md5, stage.archive_file))
        except FailedDownloadError, e:
            tty.msg("Failed to fetch %s" % url)
            continue

        finally:
Пример #35
0
def test_stage_create_replace_path(tmp_build_stage_dir):
    """Ensure stage creation replaces a non-directory path."""
    _, test_stage_path = tmp_build_stage_dir
    test_stage_path.ensure(dir=True)

    # Plant a plain file where the stage directory should go.
    blocker = test_stage_path.join('afile')
    blocker.ensure(dir=False)

    stage = Stage(str(blocker), name='')
    stage.create()  # Should ensure the path is converted to a dir

    assert os.path.isdir(stage.path)
Пример #36
0
    def check_archive(self, filename, system):
        """Archive `filename`, stage it, and expect the guesser to report
        build system `system`."""
        mkdirp('archive')
        touch(join_path('archive', filename))
        self.tar('czf', 'archive.tar.gz', 'archive')

        url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
        # print(url) is valid on Python 2 and 3; the bare 'print url'
        # statement is a Python 3 syntax error.
        print(url)
        self.stage = Stage(url)
        self.stage.fetch()

        guesser = ConfigureGuesser()
        guesser(self.stage)
        self.assertEqual(system, guesser.build_system)
Пример #37
0
    def test_no_keep_with_exceptions(self, mock_stage_archive):
        """Even with keep=False, an exception leaves the stage on disk."""
        class ThisMustFailHere(Exception):
            pass

        archive = mock_stage_archive()
        failing_stage = Stage(archive.url, name=self.stage_name, keep=False)
        try:
            with failing_stage:
                raise ThisMustFailHere()

        except ThisMustFailHere:
            assert os.path.isdir(
                get_stage_path(failing_stage, self.stage_name))
Пример #38
0
    def test_search_if_default_fails(self, failing_fetch_strategy, search_fn,
                                     err_msg, expected):
        """A failing default fetch raises and falls back to the search_fn."""
        failing_stage = Stage(failing_fetch_strategy,
                              name=self.stage_name,
                              search_fn=search_fn)

        with failing_stage:
            with pytest.raises(spack.fetch_strategy.FetchError,
                               match=expected):
                failing_stage.fetch(mirror_only=False, err_msg=err_msg)

        check_destroy(failing_stage, self.stage_name)
        assert search_fn.performed_search
Пример #39
0
    def check_archive(self, filename, system):
        """Archive `filename`, stage it, and expect the guesser to report
        build system `system`."""
        mkdirp('archive')
        touch(join_path('archive', filename))
        self.tar('czf', 'archive.tar.gz', 'archive')

        url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
        # print(url) is valid on Python 2 and 3; the bare 'print url'
        # statement is a Python 3 syntax error.
        print(url)
        with Stage(url) as stage:
            stage.fetch()

            guesser = ConfigureGuesser()
            guesser(stage)
            self.assertEqual(system, guesser.build_system)
Пример #40
0
def check_mirror():
    """Create a mirror from the mock repos, then verify each package can
    be staged from that mirror and matches the original repo contents."""
    with Stage('spack-mirror-test') as stage:
        mirror_root = os.path.join(stage.path, 'test-mirror')
        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        with spack.config.override('mirrors', mirrors):
            # Checksumming is disabled because the mock archives are
            # generated on the fly and have no recorded checksums.
            with spack.config.override('config:checksum', False):
                specs = [Spec(x).concretized() for x in repos]
                spack.mirror.create(mirror_root, specs)

            # Stage directory exists
            assert os.path.isdir(mirror_root)

            # Each spec's archive must have landed at its expected
            # storage path inside the mirror.
            for spec in specs:
                fetcher = spec.package.fetcher[0]
                per_package_ref = os.path.join(
                    spec.name, '-'.join([spec.name, str(spec.version)]))
                mirror_paths = spack.mirror.mirror_archive_paths(
                    fetcher,
                    per_package_ref)
                expected_path = os.path.join(
                    mirror_root, mirror_paths.storage_path)
                assert os.path.exists(expected_path)

            # Now try to fetch each package.
            for name, mock_repo in repos.items():
                spec = Spec(name).concretized()
                pkg = spec.package

                with spack.config.override('config:checksum', False):
                    with pkg.stage:
                        # mirror_only=True forces staging from the mirror
                        # we just created, not the original source.
                        pkg.do_stage(mirror_only=True)

                        # Compare the original repo with the expanded archive
                        original_path = mock_repo.path
                        if 'svn' in name:
                            # have to check out the svn repo to compare.
                            original_path = os.path.join(
                                mock_repo.path, 'checked_out')

                            svn = which('svn', required=True)
                            svn('checkout', mock_repo.url, original_path)

                        dcmp = filecmp.dircmp(
                            original_path, pkg.stage.source_path)

                        # make sure there are no new files in the expanded
                        # tarball
                        assert not dcmp.right_only
                        # and that all original files are present.
                        assert all(left in exclude for left in dcmp.left_only)
Пример #41
0
def test_stage_create_replace_path(tmp_build_stage_dir):
    """Ensure stage creation replaces a non-directory path."""
    _, test_stage_path = tmp_build_stage_dir
    mkdirp(test_stage_path)

    # Plant a plain file where the stage directory belongs.
    blocker = os.path.join(test_stage_path, 'afile')
    touch(blocker)

    stage = Stage(str(blocker), name='')
    stage.create()

    # Ensure the stage path is "converted" to a directory
    assert os.path.isdir(stage.path)
Пример #42
0
    def setUp(self):
        """Build a small fixture tree inside a stage and wrap it in a
        LinkTree."""
        self.stage = Stage('link-tree-test')

        with working_dir(self.stage.path):
            for relpath in ('source/1',
                            'source/a/b/2',
                            'source/a/b/3',
                            'source/c/4',
                            'source/c/d/5',
                            'source/c/d/6',
                            'source/c/d/e/7'):
                touchp(relpath)

        self.link_tree = LinkTree(os.path.join(self.stage.path, 'source'))
Пример #43
0
def test_svn_extra_fetch(tmpdir):
    """Ensure a fetch after downloading is effectively a no-op."""
    fetcher = SvnFetchStrategy(svn='file:///not-a-real-svn-repo')
    assert fetcher is not None

    stage_path = str(tmpdir)
    with Stage(fetcher, path=stage_path) as stage:
        assert stage is not None

        # Pre-create the source dir so the second fetch has nothing to do.
        mkdirp(stage.source_path)
        fetcher.fetch()
Пример #44
0
    def test_chdir(self):
        """Set up a named stage, chdir into it, then tear it down."""
        stage = Stage(archive_url, name=stage_name)
        stage.chdir()

        # Stage directory exists and we are now inside it.
        self.check_setup(stage, stage_name)
        self.check_chdir(stage, stage_name)

        stage.destroy()
        self.check_destroy(stage, stage_name)
Пример #45
0
def check_mirror():
    """Create a test mirror, verify its layout, then refetch every package.

    Registers a temporary ``file://`` mirror in spack's config, mirrors
    every package in ``repos`` into it, checks that each package got
    exactly one archive subdirectory, and finally re-stages each package
    from the mirror and compares the expanded archive with the original
    mock repository.
    """
    with Stage('spack-mirror-test') as stage:
        mirror_root = join_path(stage.path, 'test-mirror')

        # register mirror with spack config
        mirrors = {'spack-mirror-test': 'file://' + mirror_root}
        spack.config.update_config('mirrors', mirrors)

        os.chdir(stage.path)
        spack.mirror.create(mirror_root, repos, no_checksum=True)

        # Stage directory exists
        assert os.path.isdir(mirror_root)

        # check that there is a subdir with exactly one archive per package
        for name in repos:
            subdir = join_path(mirror_root, name)
            assert os.path.isdir(subdir)
            assert len(os.listdir(subdir)) == 1

        # Now try to fetch each package from the mirror.
        # NOTE: this loop was previously nested inside the subdir-check
        # loop above, which shadowed ``name`` and redundantly refetched
        # every package once per package in ``repos``.
        for name, mock_repo in repos.items():
            spec = Spec(name).concretized()
            pkg = spec.package

            saved_checksum_setting = spack.do_checksum
            try:
                with pkg.stage:
                    # Stage the archive from the mirror and cd to it.
                    spack.do_checksum = False
                    pkg.do_stage(mirror_only=True)

                    # Compare the original repo with the expanded archive
                    original_path = mock_repo.path
                    if 'svn' in name:
                        # have to check out the svn repo to compare.
                        original_path = join_path(mock_repo.path,
                                                  'checked_out')

                        svn = which('svn', required=True)
                        svn('checkout', mock_repo.url, original_path)

                    dcmp = filecmp.dircmp(original_path,
                                          pkg.stage.source_path)

                    # make sure there are no new files in the expanded
                    # tarball
                    assert not dcmp.right_only
                    # and that all original files are present.
                    assert all(left in exclude for left in dcmp.left_only)
            finally:
                # Restore the global checksum flag even if an assertion
                # above fails (the original restore could be skipped).
                spack.do_checksum = saved_checksum_setting
Пример #46
0
def test_fetch(tmpdir, _fetch_method):
    """Ensure a fetch after expanding is effectively a no-op."""
    testpath = str(tmpdir)
    cache = os.path.join(testpath, 'cache.tar.gz')
    touch(cache)

    # file:// URLs get a leading slash only for non-Windows paths.
    url_stub = '{0}' if is_windows else '/{0}'
    url = 'file://' + url_stub.format(cache)

    with spack.config.override('config:url_fetch_method', _fetch_method):
        fetcher = CacheURLFetchStrategy(url=url)
        with Stage(fetcher, path=testpath) as stage:
            # Pre-create the source dir so fetch has nothing left to do.
            mkdirp(stage.source_path)
            fetcher.fetch()
Пример #47
0
def composite_stage_with_expanding_resource(
        mock_archive, mock_expand_resource):
    """Build a StageComposite holding a root stage plus one resource stage.

    Returns the composite along with the two member stages.
    """
    root_stage = Stage(mock_archive.url)

    resource_fetcher = spack.fetch_strategy.from_kwargs(
        url=mock_expand_resource.url)
    # Specify that the resource files are to be placed in the 'resource-dir'
    # directory
    resource = Resource(
        'test_resource', resource_fetcher, '', 'resource-dir')
    resource_stage = ResourceStage(
        resource_fetcher, root_stage, resource)

    composite_stage = StageComposite()
    composite_stage.append(root_stage)
    composite_stage.append(resource_stage)

    return composite_stage, root_stage, resource_stage
Пример #48
0
class LinkTreeTest(unittest.TestCase):
    """Tests Spack's LinkTree class."""

    # Relative paths (under 'source/') that setUp creates and that the
    # merge/unmerge tests expect to see linked into 'dest/'.
    files = ['1', 'a/b/2', 'a/b/3', 'c/4', 'c/d/5', 'c/d/6', 'c/d/e/7']

    def setUp(self):
        """Build a staged 'source' tree and a LinkTree rooted at it."""
        self.stage = Stage('link-tree-test')

        with working_dir(self.stage.path):
            for relpath in self.files:
                touchp('source/' + relpath)

        self.link_tree = LinkTree(os.path.join(self.stage.path, 'source'))

    def tearDown(self):
        """Destroy the stage created by setUp, if any."""
        if self.stage:
            self.stage.destroy()

    def check_file_link(self, filename):
        """Assert that ``filename`` exists and is a symlink to a file."""
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue(os.path.islink(filename))

    def check_dir(self, filename):
        """Assert that ``filename`` is a directory."""
        self.assertTrue(os.path.isdir(filename))

    def test_merge_to_new_directory(self):
        with working_dir(self.stage.path):
            self.link_tree.merge('dest')

            # Every source file shows up as a link under dest/.
            for relpath in self.files:
                self.check_file_link('dest/' + relpath)

            self.link_tree.unmerge('dest')

            # Unmerging from a directory we created removes it entirely.
            self.assertFalse(os.path.exists('dest'))

    def test_merge_to_existing_directory(self):
        with working_dir(self.stage.path):
            # Pre-existing files that merge/unmerge must leave alone.
            touchp('dest/x')
            touchp('dest/a/b/y')

            self.link_tree.merge('dest')

            for relpath in self.files:
                self.check_file_link('dest/' + relpath)

            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))

            self.link_tree.unmerge('dest')

            # The pre-existing files survive; the links do not.
            self.assertTrue(os.path.isfile('dest/x'))
            self.assertTrue(os.path.isfile('dest/a/b/y'))
            for relpath in self.files:
                self.assertFalse(os.path.isfile('dest/' + relpath))

    def test_merge_with_empty_directories(self):
        with working_dir(self.stage.path):
            mkdirp('dest/f/g')
            mkdirp('dest/a/b/h')

            self.link_tree.merge('dest')
            self.link_tree.unmerge('dest')

            # Unmerge removes every link...
            for relpath in self.files:
                self.assertFalse(os.path.exists('dest/' + relpath))

            # ...but leaves the pre-made empty directories in place.
            self.assertTrue(os.path.isdir('dest/a/b/h'))
            self.assertTrue(os.path.isdir('dest/f/g'))

    def test_ignore(self):
        with working_dir(self.stage.path):
            touchp('source/.spec')
            touchp('dest/.spec')

            ignore_spec = lambda entry: entry == '.spec'
            self.link_tree.merge('dest', ignore=ignore_spec)
            self.link_tree.unmerge('dest', ignore=ignore_spec)

            # Nothing from source/ remains after a full unmerge.
            self.assertFalse(os.path.exists('dest/1'))
            self.assertFalse(os.path.exists('dest/a'))
            self.assertFalse(os.path.exists('dest/c'))

            # The ignored .spec files were left untouched on both sides.
            self.assertTrue(os.path.isfile('source/.spec'))
            self.assertTrue(os.path.isfile('dest/.spec'))
Пример #49
0
            if not vspec.satisfies(spec):
                continue

            mirror_path = "%s/%s-%s.%s" % (
                pkg.name, pkg.name, version, extension(pkg.url))

            os.chdir(working_dir)
            mirror_file = join_path(args.directory, mirror_path)
            if os.path.exists(mirror_file):
                tty.msg("Already fetched %s." % mirror_file)
                num_mirrored += 1
                continue

            # Get the URL for the version and set up a stage to download it.
            url = pkg.url_for_version(version)
            stage = Stage(url)
            try:
                # fetch changes directory into the stage
                stage.fetch()

                if not args.no_checksum and version in pkg.versions:
                    digest = pkg.versions[version]
                    stage.check(digest)
                    tty.msg("Checksum passed for %s@%s" % (pkg.name, version))

                # change back and move the new archive into place.
                os.chdir(working_dir)
                shutil.move(stage.archive_file, mirror_file)
                tty.msg("Added %s to mirror" % mirror_file)
                num_mirrored += 1
Пример #50
0
    def test_restage(self):
        """Check that restage wipes out changes made in the expanded archive."""
        stage = Stage(archive_url, name=stage_name)

        stage.fetch()
        stage.expand_archive()
        stage.chdir_to_archive()
        self.check_expand_archive(stage, stage_name)
        self.check_chdir_to_archive(stage, stage_name)

        # Try to make a file in the old archive dir.
        # NOTE: file objects are context managers themselves, so the
        # former contextlib.closing() wrapper was redundant; also renamed
        # the handle so it no longer shadows the builtin ``file``.
        with open('foobar', 'w') as f:
            f.write("this file is to be destroyed.")

        self.assertTrue('foobar' in os.listdir(stage.expanded_archive_path))

        # Make sure the file is not there after restage.
        stage.restage()
        self.check_chdir(stage, stage_name)
        self.check_fetch(stage, stage_name)

        stage.chdir_to_archive()
        self.check_chdir_to_archive(stage, stage_name)
        self.assertFalse('foobar' in os.listdir(stage.expanded_archive_path))

        stage.destroy()
        self.check_destroy(stage, stage_name)