Ejemplo n.º 1
0
 def test_setup_and_destroy_no_name_without_tmp(self):
     """A nameless stage (tmp dirs off) is set up on enter, gone on exit."""
     with use_tmp(False):
         stage = Stage(archive_url)
         with stage:
             self.check_setup(stage, None)
         self.check_destroy(stage, None)
Ejemplo n.º 2
0
 def test_keep_without_exceptions(self, mock_archive):
     """With keep=True the stage directory survives a clean context exit."""
     with Stage(mock_archive.url, name=self.stage_name, keep=True) as stage:
         pass
     assert os.path.isdir(get_stage_path(stage, self.stage_name))
Ejemplo n.º 3
0
 def test_setup_and_destroy_name_with_tmp(self):
     """A named stage (tmp dirs on) is set up on enter, destroyed on exit."""
     with use_tmp(True):
         stage = Stage(archive_url, name=stage_name)
         with stage:
             self.check_setup(stage, stage_name)
         self.check_destroy(stage, stage_name)
Ejemplo n.º 4
0
 def test_fetch(self, mock_archive):
     """Fetching into a named stage downloads the archive; exit cleans up."""
     stage = Stage(mock_archive.url, name=self.stage_name)
     with stage:
         stage.fetch()
         check_setup(stage, self.stage_name, mock_archive)
         check_fetch(stage, self.stage_name)
     check_destroy(stage, self.stage_name)
Ejemplo n.º 5
0
 def test_no_keep_without_exceptions(self, mock_archive):
     """With keep=False the stage directory is removed on a clean exit."""
     with Stage(mock_archive.url, name=self.stage_name, keep=False) as stage:
         pass
     check_destroy(stage, self.stage_name)
Ejemplo n.º 6
0
 def test_keep_without_exceptions(self):
     """With keep=True the stage directory survives a clean context exit."""
     stage = Stage(archive_url, name=stage_name, keep=True)
     with stage:
         pass
     self.assertTrue(os.path.isdir(self.get_stage_path(stage, stage_name)))
Ejemplo n.º 7
0
def _tweak_dev_package_fetcher(dp, spec):
    """Attempt to configure the package's fetcher and stage to obtain the
    source we want to develop for this package.

    If ``dp.tag_or_branch`` names a version-controlled version declared in
    the package recipe, that version's fetcher is used directly.  Otherwise,
    if the recipe declares a version-controlled ``develop`` version, its
    version dictionary is rewritten in place to check out the requested tag
    or branch (git and mercurial only).  If neither works, a warning is
    issued and the concretized version is left alone.

    Args:
        dp: dev-package request; ``dp.tag_or_branch`` is the ref to check
            out, ``dp.key`` is ``'tag'`` or ``'branch'`` (or falsy), and
            ``dp.name`` is the package name (used in messages).
        spec: concretized spec whose package's fetcher/stage is adjusted.

    Raises:
        ExtrapolationError: the matching ``develop`` version uses
            Subversion, whose URL layout makes ref selection unreliable.
        SpackError: the ``develop`` version dict claims a VC method this
            function does not recognize.
    """
    if dp.tag_or_branch is None:
        # Nothing to do.
        return

    fetcher_version = None
    spack_package = spec.package

    # We want the tag or branch specified in dp.
    package_version = Version(dp.tag_or_branch)
    develop_version = Version('develop')
    if package_version in spack_package.versions and \
       _version_is_vc(spack_package, package_version):
        # Specified version is version-controlled: use it as-is.
        fetcher_version = package_version
    elif develop_version in spack_package.versions and \
            _version_is_vc(spack_package, develop_version):
        # Repurpose develop to obtain the tag/branch we need.
        version_dict = spack_package.versions[develop_version]
        # Attempt to tweak things to check out our desired tag or branch.
        if 'git' in version_dict:  # Git.
            # A pinned commit would override tag/branch selection.
            version_dict.pop('commit', None)
            if dp.key == 'tag':
                version_dict['tag'] = dp.tag_or_branch
                version_dict.pop('branch', None)
            else:  # Branch.
                version_dict['branch'] = dp.tag_or_branch
                version_dict.pop('tag', None)
        elif 'hg' in version_dict:  # Mercurial.
            version_dict['revision'] = dp.tag_or_branch
        elif 'svn' in version_dict:  # Subversion.
            # Can't reliably do anything here since SVN URL structure is
            # convention only, and it is also not possible to reliably
            # distinguish between two common conventions
            # ('project/<trunk-or-branches-or-tags>' vs
            # '<trunk-or-branches-or-tags>/project').
            raise ExtrapolationError(
                'For subversion repositories, a VC version corresponding to '
                '{0} must be defined in the recipe for {1}.'.format(
                    dp.tag_or_branch, dp.name))
        else:
            raise SpackError('INTERNAL ERROR: spack dev cannot handle '
                             'apparently-supported VC method\n'
                             'version_dict = {0}'.format(version_dict))
        fetcher_version = develop_version

    if fetcher_version:
        version_dict = spack_package.versions[fetcher_version]
        version_dict['no_cache'] = True  # Disable caching.
        if 'git' in version_dict:
            # Disable efficiency options that aren't wanted here.
            version_dict.update({'full_depth': True, 'all_branches': True})
        spack_package.fetcher = fs.for_package_version(spack_package,
                                                       fetcher_version)
        spack_package.stage = Stage(spack_package.fetcher,
                                    path=spack_package.path)
    else:
        tty.warn('Spack dev unable to obtain VC source for package {0} {1}'
                 '\nFalling back to version {2} as concretized'.format(
                     dp.name, 'with user-specified {0} {1}'.format(
                         dp.key, dp.tag_or_branch) if dp.key else '',
                     spack_package.version))
Ejemplo n.º 8
0
 def test_stage_constructor_no_fetcher(self):
     """Ensure Stage constructor with no URL or fetch strategy fails."""
     with pytest.raises(ValueError):
         stage = Stage(None)
         with stage:
             pass
Ejemplo n.º 9
0
 def test_stage_constructor_with_path(self, tmpdir):
     """Ensure Stage constructor with a path uses it."""
     expected = str(tmpdir)
     stage = Stage('file:///does-not-exist', path=expected)
     with stage:
         assert expected == stage.path
Ejemplo n.º 10
0
 def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive):
     """A named stage is set up inside the context and destroyed after."""
     arch = mock_stage_archive()
     stage = Stage(arch.url, name=self.stage_name)
     with stage:
         check_setup(stage, self.stage_name, arch)
     check_destroy(stage, self.stage_name)
Ejemplo n.º 11
0
 def test_setup_and_destroy_no_name_with_tmp(self, mock_stage_archive):
     """A nameless stage is set up inside the context and destroyed after."""
     arch = mock_stage_archive()
     stage = Stage(arch.url)
     with stage:
         check_setup(stage, None, arch)
     check_destroy(stage, None)
Ejemplo n.º 12
0
 def test_setup_and_destroy_name_without_tmp(self):
     """A named stage (tmp dirs off) is set up on enter, gone on exit."""
     with use_tmp(False):
         stage = Stage(self.archive_url, name=self.stage_name)
         with stage:
             self.check_setup(stage, self.stage_name)
         self.check_destroy(stage, self.stage_name)
Ejemplo n.º 13
0
            if not vspec.satisfies(spec):
                continue

            mirror_path = "%s/%s-%s.%s" % (pkg.name, pkg.name, version,
                                           extension(pkg.url))

            os.chdir(working_dir)
            mirror_file = join_path(args.directory, mirror_path)
            if os.path.exists(mirror_file):
                tty.msg("Already fetched %s." % mirror_file)
                num_mirrored += 1
                continue

            # Get the URL for the version and set up a stage to download it.
            url = pkg.url_for_version(version)
            stage = Stage(url)
            try:
                # fetch changes directory into the stage
                stage.fetch()

                if not args.no_checksum and version in pkg.versions:
                    digest = pkg.versions[version]
                    stage.check(digest)
                    tty.msg("Checksum passed for %s@%s" % (pkg.name, version))

                # change back and move the new archive into place.
                os.chdir(working_dir)
                shutil.move(stage.archive_file, mirror_file)
                tty.msg("Added %s to mirror" % mirror_file)
                num_mirrored += 1
Ejemplo n.º 14
0
def get_checksums(url_dict, name, **kwargs):
    """Fetches and checksums archives from URLs.

    This function is called by both ``spack checksum`` and ``spack create``.
    The ``first_stage_function`` kwarg allows ``spack create`` to determine
    things like the build system of the archive.

    :param dict url_dict: A dictionary of the form: version -> URL
    :param str name: The name of the package
    :param callable first_stage_function: Function to run on first staging area
    :param bool keep_stage: Don't clean up staging area when command completes

    :returns: A multi-line string containing versions and corresponding hashes
    :rtype: str
    """
    first_stage_function = kwargs.get('first_stage_function', None)
    keep_stage = kwargs.get('keep_stage', False)

    # Newest versions first.
    sorted_versions = sorted(url_dict, reverse=True)

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v in sorted_versions)
    num_ver = len(sorted_versions)

    tty.msg(
        "Found {0} version{1} of {2}:".format(num_ver,
                                              '' if num_ver == 1 else 's',
                                              name), "",
        *spack.cmd.elide_list([
            "{0:{1}}  {2}".format(v, max_len, url_dict[v])
            for v in sorted_versions
        ]))
    print()

    archives_to_fetch = tty.get_number("How many would you like to checksum?",
                                       default=1,
                                       abort='q')

    if not archives_to_fetch:
        tty.die("Aborted.")

    versions = sorted_versions[:archives_to_fetch]
    urls = [url_dict[v] for v in versions]

    tty.msg("Downloading...")
    version_hashes = []
    for url, version in zip(urls, versions):
        try:
            with Stage(url, keep=keep_stage) as stage:
                # Fetch the archive
                stage.fetch()
                # Run first_stage_function exactly once, on the first
                # *successfully fetched* stage (version_hashes is empty
                # until the first success).
                if first_stage_function and not version_hashes:
                    first_stage_function(stage, url)

                # Checksum the archive and add it to the list.
                # NOTE(review): md5 is a weak hash; kept here because the
                # generated version() directives must match what the
                # package format expects.
                version_hashes.append(
                    (version,
                     spack.util.crypto.checksum(hashlib.md5,
                                                stage.archive_file)))
        except FailedDownloadError:
            tty.msg("Failed to fetch {0}".format(url))
        except Exception as e:
            # Best-effort: report and move on to the next version.
            tty.msg("Something failed on {0}, skipping.".format(url),
                    "  ({0})".format(e))

    if not version_hashes:
        tty.die("Could not fetch any versions for {0}".format(name))

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v, h in version_hashes)

    # Generate the version directives to put in a package.py
    version_lines = "\n".join([
        "    version('{0}', {1}'{2}')".format(v, ' ' * (max_len - len(str(v))),
                                              h) for v, h in version_hashes
    ])

    num_hash = len(version_hashes)
    tty.msg("Checksummed {0} version{1} of {2}".format(
        num_hash, '' if num_hash == 1 else 's', name))

    return version_lines
Ejemplo n.º 15
0
 def test_chdir(self):
     """chdir() enters the stage directory; everything is cleaned up after."""
     stage = Stage(archive_url, name=stage_name)
     with stage:
         stage.chdir()
         self.check_setup(stage, stage_name)
         self.check_chdir(stage, stage_name)
     self.check_destroy(stage, stage_name)
Ejemplo n.º 16
0
 def test_setup_and_destroy_name_without_tmp(self, mock_archive):
     """A named stage is set up inside the context and destroyed after."""
     stage = Stage(mock_archive.url, name=self.stage_name)
     with stage:
         check_setup(stage, self.stage_name, mock_archive)
     check_destroy(stage, self.stage_name)
Ejemplo n.º 17
0
 def test_no_keep_without_exceptions(self):
     """With keep=False the stage directory is removed on a clean exit."""
     stage = Stage(archive_url, name=stage_name, keep=False)
     with stage:
         pass
     self.check_destroy(stage, stage_name)
Ejemplo n.º 18
0
 def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
     """A nameless stage is set up inside the context and destroyed after."""
     stage = Stage(mock_archive.url)
     with stage:
         check_setup(stage, None, mock_archive)
     check_destroy(stage, None)
Ejemplo n.º 19
0
def _resolve_mirror_url(directory, mirror_name, mirror_url, dir_flag):
    """Resolve one side's --*-directory/--*-mirror-name/--*-mirror-url
    options to a mirror fetch URL.

    Exactly one of the first three arguments is expected to be set; if all
    are falsy, the mirror lookup receives ``None`` (matching the historical
    behavior).

    Args:
        directory (str or None): local directory path option
        mirror_name (str or None): configured mirror name option
        mirror_url (str or None): explicit mirror URL option
        dir_flag (str): flag name (e.g. ``'--src-directory'``) for messages

    Returns:
        str: formatted fetch URL of the resolved mirror

    Raises:
        ValueError: a URL was passed to the directory option, an unknown
            mirror name was given, or the URL option is not a valid URL.
    """
    location = None
    if directory:
        location = directory
        scheme = url_util.parse(location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"{0}" expected a local path; got a URL, instead'.format(
                    dir_flag))
        # Ensure that the mirror lookup does not mistake this for named mirror
        location = 'file://' + location
    elif mirror_name:
        location = mirror_name
        result = spack.mirror.MirrorCollection().lookup(location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=location))
    elif mirror_url:
        location = mirror_url
        scheme = url_util.parse(location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=location))

    mirror = spack.mirror.MirrorCollection().lookup(location)
    return url_util.format(mirror.fetch_url)


def sync_fn(args):
    """ Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        args: parsed command-line namespace; the source mirror comes from
            ``src_directory``/``src_mirror_name``/``src_mirror_url`` and the
            destination from the corresponding ``dest_*`` attributes.
    """
    # Figure out the source and destination mirrors.
    src_mirror_url = _resolve_mirror_url(
        args.src_directory, args.src_mirror_name, args.src_mirror_url,
        '--src-directory')
    dest_mirror_url = _resolve_mirror_url(
        args.dest_directory, args.dest_mirror_name, args.dest_mirror_url,
        '--dest-directory')

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug('  {0}{1}: {2}'.format('* ' if s in env.roots() else '  ',
                                         s.name, s.dag_hash()))

        # Tarball plus both metadata formats (yaml and json) per spec.
        buildcache_rel_paths.extend([
            os.path.join(build_cache_dir,
                         bindist.tarball_path_name(s, '.spack')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.spec.json')),
        ])

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                # Best-effort: missing files on the source mirror are only
                # reported at debug level, and the sync continues.
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        # Always remove the temporary download area.
        shutil.rmtree(tmpdir)
Ejemplo n.º 20
0
def create(path, specs, **kwargs):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives.

       Arguments:
         path    Path to create a mirror directory hierarchy in.
         specs   Any package versions matching these specs will be added
                 to the mirror.

       Keyword args:
         no_checksum:  If True, do not checkpoint when fetching (default False)
         num_versions: Max number of versions to fetch per spec,
                       if spec is ambiguous (default is 0 for all of them)

       Return Value:
         Returns a tuple of lists: (present, mirrored, error)
         * present:  Package specs that were already prsent.
         * mirrored: Package specs that were successfully mirrored.
         * error:    Package specs that failed to mirror due to some error.

       This routine iterates through all known package versions, and
       it creates specs for those versions.  If the version satisfies any spec
       in the specs list, it is downloaded and added to the mirror.
    """
    # Make sure nothing is in the way.
    if os.path.isfile(path):
        raise MirrorError("%s already exists and is a file." % path)

    # automatically spec-ify anything in the specs array.
    specs = [s if isinstance(s, Spec) else Spec(s) for s in specs]

    # Get concrete specs for each matching version of these specs.
    version_specs = get_matching_versions(specs,
                                          num_versions=kwargs.get(
                                              'num_versions', 0))
    for s in version_specs:
        s.concretize()

    # Get the absolute path of the root before we start jumping around.
    mirror_root = os.path.abspath(path)
    if not os.path.isdir(mirror_root):
        mkdirp(mirror_root)

    # Things to keep track of while parsing specs.
    present = []
    mirrored = []
    error = []

    # Iterate through packages and download all the safe tarballs for each of them
    for spec in version_specs:
        pkg = spec.package

        stage = None
        try:
            # create a subdirectory for the current package@version
            subdir = join_path(mirror_root, pkg.name)
            mkdirp(subdir)

            archive_file = mirror_archive_filename(spec)
            archive_path = join_path(subdir, archive_file)

            if os.path.exists(archive_path):
                tty.msg("Already added %s" % spec.format("$_$@"))
                present.append(spec)
                continue

            # Set up a stage and a fetcher for the download
            unique_fetch_name = spec.format("$_$@")
            fetcher = fs.for_package_version(pkg, pkg.version)
            stage = Stage(fetcher, name=unique_fetch_name)
            fetcher.set_stage(stage)

            # Do the fetch and checksum if necessary
            fetcher.fetch()
            if not kwargs.get('no_checksum', False):
                fetcher.check()
                tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version))

            # Fetchers have to know how to archive their files.  Use
            # that to move/copy/create an archive in the mirror.
            fetcher.archive(archive_path)
            tty.msg("Added %s." % spec.format("$_$@"))
            mirrored.append(spec)

        except Exception, e:
            if spack.debug:
                sys.excepthook(*sys.exc_info())
            else:
                tty.warn("Error while fetching %s." % spec.format('$_$@'),
                         e.message)
            error.append(spec)

        finally: