Example #1
    def test_non_existing_dir(self, stage):
        """Test installing to a non-existing directory."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest/sub/directory')

            assert os.path.exists('dest/sub/directory/a/b/2')
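
Every test here receives a `stage` fixture that is defined elsewhere in the test module. A minimal sketch of what such a fixture could look like, assuming only the handful of paths these assertions touch (the layout, file contents, and use of pytest's tmpdir are illustrative; Spack's real fixture builds a larger tree):

import os

import pytest


@pytest.fixture
def stage(tmpdir):
    """Hypothetical stand-in for the real 'stage' fixture used by these tests."""
    source = tmpdir.mkdir('source')
    source.mkdir('a').mkdir('b').join('2').write('file 2')  # 'source/a/b/2'
    os.symlink('a/b/2', str(source.join('2')))              # 'source/2' is a symlink
    return tmpdir
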
Example #3
    def install(self, spec, prefix):

        with working_dir(os.path.join(self.rel_ARCHDIR, 'lib')):
            os.symlink('libmct.a', 'libmct_oasis.a')
            os.symlink('libmpeu.a', 'libmpeu_oasis.a')

        install_tree(self.rel_ARCHDIR, prefix)
Example #4
    def test_parent_dir(self, stage):
        """Test source as a parent directory of destination."""

        with fs.working_dir(str(stage)):
            match = 'Cannot copy ancestor directory'
            with pytest.raises(ValueError, match=match):
                fs.install_tree('source', 'source/sub/directory')
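
The error message being matched comes from a guard against copying a directory into one of its own subdirectories, which would otherwise recurse forever. A rough standalone version of such an ancestor check (not Spack's actual implementation):

import os


def is_ancestor(src, dest):
    """Return True if 'src' is 'dest' itself or one of its ancestors."""
    src, dest = os.path.abspath(src), os.path.abspath(dest)
    return os.path.commonpath([src, dest]) == src

# A guard of roughly this shape is what the test above expects to trip:
#     if is_ancestor(src, dest):
#         raise ValueError('Cannot copy ancestor directory {0} into {1}'.format(src, dest))
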
Example #5
    def test_existing_dir(self, stage):
        """Test installing to an existing directory."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest')

            assert os.path.exists('dest/a/b/2')
            check_added_exe_permissions('source/a/b/2', 'dest/a/b/2')
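
check_added_exe_permissions is another helper from the test module. A plausible sketch, assuming it asserts that execute bits present on the source file are also present on the installed copy (the real helper may check a slightly different property):

import os
import stat


def check_added_exe_permissions(src, dest):
    """Hypothetical helper: every execute bit set on src must also be set on dest."""
    exe_bits = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    src_exe = os.stat(src).st_mode & exe_bits
    dest_exe = os.stat(dest).st_mode & exe_bits
    assert src_exe & dest_exe == src_exe
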
Example #6
    def test_symlinks_false(self, stage):
        """Test installing without symlink preservation."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest', symlinks=False)

            assert os.path.exists('dest/2')
            assert not os.path.islink('dest/2')
Example #8
    def test_symlinks_true(self, stage):
        """Test installing with symlink preservation."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest', symlinks=True)

            assert os.path.exists('dest/2')
            assert os.path.islink('dest/2')
            check_added_exe_permissions('source/2', 'dest/2')
Example #9
    def test_symlinks_false(self, stage):
        """Test installing without symlink preservation."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source', 'dest', symlinks=False)

            assert os.path.exists('dest/2')
            if sys.platform != "win32":
                assert not os.path.islink('dest/2')
            check_added_exe_permissions('source/2', 'dest/2')
Example #10
    def test_glob_src(self, stage):
        """Test using a glob as the source."""

        with fs.working_dir(str(stage)):
            fs.install_tree('source/g/*', 'dest')

            assert os.path.exists('dest/i/8')
            assert os.path.exists('dest/i/9')
            assert os.path.exists('dest/j/10')
            check_added_exe_permissions('source/g/h/i/8', 'dest/i/8')
            check_added_exe_permissions('source/g/h/i/9', 'dest/i/9')
            check_added_exe_permissions('source/g/i/j/10', 'dest/j/10')
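
When the source is a glob, the pattern is expanded first and matched directories have their contents merged into the destination, which is why 'source/g/h/i/8' ends up at 'dest/i/8'. A rough standard-library equivalent of that behavior (illustration only, not Spack's code; dirs_exist_ok needs Python 3.8+):

import glob
import os
import shutil


def copy_glob_contents(pattern, dest):
    """Merge the contents of everything matching 'pattern' into 'dest'."""
    os.makedirs(dest, exist_ok=True)
    for src in glob.glob(pattern):
        if os.path.isdir(src):
            # A matched directory contributes its children, not itself.
            for child in os.listdir(src):
                child_src = os.path.join(src, child)
                child_dst = os.path.join(dest, child)
                if os.path.isdir(child_src):
                    shutil.copytree(child_src, child_dst, dirs_exist_ok=True)
                else:
                    shutil.copy2(child_src, child_dst)
        else:
            shutil.copy2(src, os.path.join(dest, os.path.basename(src)))
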
Example #11
    def install(self, spec, prefix):

        # edit cns_solve_environment to allow a build
        shutil.copy('cns_solve_env', 'cns_solve_env.back')
        filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                    f"setenv CNS_SOLVE '{self.stage.source_path}'",
                    'cns_solve_env')

        # copy over an almost-right machine make file; we could have taken it from v1.3 but this is simpler
        src_file = 'instlib/machine/supported/intel-x86_64bit-linux/Makefile.header.2.gfortran'
        dest_file = 'instlib/machine/supported/mac-intel-darwin/Makefile.header.5.gfortran'
        shutil.move(src_file, dest_file)

        if not self.spec.satisfies('%fortran@:10.0.0'):
            # patch the machine make file; this can't be done with a patch statement because the file doesn't exist until we copy it
            # tried just copying the file from the package directory but it caused a lockup
            patch = which('patch')
            patch_file = join_path(
                package_root, 'nmrpack/packages/cns',
                'gfortran_10_allow_argument_mismatch.patch')
            patch('-p1', '-i', patch_file)

        if '+aria' in self.spec:
            from_path = pathlib.Path('aria2.3/cns/src')
            to_path = 'source'

            for target_file in from_path.iterdir():
                if target_file.is_file() and target_file.suffix in ('.f',
                                                                    '.inc'):
                    print(f'copying {target_file} to {to_path}')
                    shutil.copy(target_file, to_path)
                if target_file.is_dir():
                    print(f'copying {target_file} to {to_path}')
                    shutil.copytree(target_file,
                                    join_path(to_path, target_file.name))

            shutil.copytree(from_path, 'aria2.3_patches_applied')
            shutil.rmtree('aria2.3')

        make('install')

        install_tree('.', prefix)

        with working_dir(prefix):
            shutil.move('cns_solve_env.back', 'cns_solve_env')

            replacement_env = f" setenv CNS_SOLVE  '{prefix}'"
            filter_file(r"setenv CNS_SOLVE '_CNSsolve_location_'",
                        replacement_env, 'cns_solve_env')

        # remove a leftover from our previous edits
        os.remove(pathlib.Path(prefix) / pathlib.Path('cns_solve_env' + '~'))
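
filter_file rewrites a file in place with a regex substitution; here it swaps the '_CNSsolve_location_' placeholder for the real path. A bare-bones approximation using re.sub (Spack's real filter_file supports backups, multiple files and more options):

import re


def filter_file_approx(regex, replacement, path):
    """Replace every match of 'regex' with 'replacement' in 'path', in place."""
    with open(path) as f:
        text = f.read()
    with open(path, 'w') as f:
        f.write(re.sub(regex, replacement, text))
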
Example #12
    def _add_to_root_stage(self):
        """
        Move the extracted resource to the root stage (according to placement).
        """
        root_stage = self.root_stage
        resource = self.resource

        if resource.placement:
            placement = resource.placement
        elif self.srcdir:
            placement = self.srcdir
        else:
            placement = self.source_path

        if not isinstance(placement, dict):
            placement = {'': placement}

        target_path = os.path.join(
            root_stage.source_path, resource.destination)

        try:
            os.makedirs(target_path)
        except OSError as err:
            tty.debug(err)
            if err.errno == errno.EEXIST and os.path.isdir(target_path):
                pass
            else:
                raise

        for key, value in iteritems(placement):
            destination_path = os.path.join(target_path, value)
            source_path = os.path.join(self.source_path, key)

            if not os.path.exists(destination_path):
                tty.info('Moving resource stage\n\tsource : '
                         '{stage}\n\tdestination : {destination}'.format(
                             stage=source_path, destination=destination_path
                         ))

                src = os.path.realpath(source_path)

                if os.path.isdir(src):
                    install_tree(src, destination_path)
                else:
                    install(src, destination_path)
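
Normalizing `placement` to a dict lets a plain string and a mapping of sub-paths share the same move loop. A small worked example with hypothetical values:

# A bare string means "put the whole extracted resource at this path":
placement = 'resource-1.0'
if not isinstance(placement, dict):
    placement = {'': placement}
# -> {'': 'resource-1.0'}: everything under source_path is moved to
#    <target_path>/resource-1.0

# A dict maps sub-paths of the extracted resource to their destinations:
placement = {'lib': 'external/lib', 'include': 'external/include'}
# -> <source_path>/lib     is moved to <target_path>/external/lib
#    <source_path>/include is moved to <target_path>/external/include
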
Example #13
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
                  allow_root=False, key=None):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    # set up some paths
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(outdir, "build_cache",
                               tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    mkdirp(tarfile_dir)
    spackfile_path = os.path.join(
        outdir, "build_cache", tarball_path_name(spec, '.spack'))
    if os.path.exists(spackfile_path):
        if force:
            os.remove(spackfile_path)
        else:
            raise NoOverwriteException(str(spackfile_path))
    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(
        os.path.join(outdir, "build_cache", specfile_name))
    indexfile_path = os.path.join(outdir, "build_cache", "index.html")
    if os.path.exists(specfile_path):
        if force:
            os.remove(specfile_path)
        else:
            raise NoOverwriteException(str(specfile_path))
    # make a copy of the install directory to work with
    workdir = os.path.join(tempfile.mkdtemp(), os.path.basename(spec.prefix))
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    else:
        try:
            make_package_placeholder(workdir, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    spec_dict = {}
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = yaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(
        spec.prefix, spack.store.layout.root)
    spec_dict['buildinfo'] = buildinfo
    with open(specfile_path, 'w') as outfile:
        outfile.write(yaml.dump(spec_dict))
    # sign the tarball and spec file with gpg
    if not unsigned:
        sign_tarball(key, force, specfile_path)
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # create an index.html for the build_cache directory so specs can be found
    if os.path.exists(indexfile_path):
        os.remove(indexfile_path)
    generate_index(outdir, indexfile_path)
    return None
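
checksum_tarball is not shown in these examples; given that its result is stored with 'hash_algorithm': 'sha256', it presumably streams the tarball through SHA-256. A minimal sketch of such a helper with hashlib (an assumption, not Spack's actual implementation):

import hashlib


def checksum_tarball(path, block_size=1024 * 1024):
    """Return the hex SHA-256 digest of the file at 'path', read in chunks."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(block_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
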
Example #14
    def test_non_existing_src(self, stage):
        """Test using a non-existing source."""

        with fs.working_dir(str(stage)):
            with pytest.raises(IOError, match='No such file or directory'):
                fs.install_tree('source/none', 'dest')
Example #15
def build_tarball(spec,
                  outdir,
                  force=False,
                  rel=False,
                  yes_to_all=False,
                  key=None):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    # set up some paths
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = join_path(outdir, "build_cache",
                            tarball_directory_name(spec))
    tarfile_path = join_path(tarfile_dir, tarfile_name)
    mkdirp(tarfile_dir)
    spackfile_path = os.path.join(outdir, "build_cache",
                                  tarball_path_name(spec, '.spack'))
    if os.path.exists(spackfile_path):
        if force:
            os.remove(spackfile_path)
        else:
            raise NoOverwriteException(str(spackfile_path))
    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = join_path(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(
        join_path(outdir, "build_cache", specfile_name))
    indexfile_path = join_path(outdir, "build_cache", "index.html")
    if os.path.exists(specfile_path):
        if force:
            os.remove(specfile_path)
        else:
            raise NoOverwriteException(str(specfile_path))
    # make a copy of the install directory to work with
    workdir = join_path(outdir, os.path.basename(spec.prefix))
    if os.path.exists(workdir):
        shutil.rmtree(workdir)
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        make_package_relative(workdir, spec.prefix)
    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir, arcname='%s' % os.path.basename(workdir))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    spec_dict = {}
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = yaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    with open(specfile_path, 'w') as outfile:
        outfile.write(yaml.dump(spec_dict))
    signed = False
    if not yes_to_all:
        # sign the tarball and spec file with gpg
        try:
            sign_tarball(yes_to_all, key, force, specfile_path)
            signed = True
        except NoGpgException:
            raise NoGpgException()
        except PickKeyException:
            raise PickKeyException()
        except NoKeyException:
            raise NoKeyException()
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if signed:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if signed:
        os.remove('%s.asc' % specfile_path)

    # create an index.html for the build_cache directory so specs can be found
    if os.path.exists(indexfile_path):
        os.remove(indexfile_path)
    generate_index(outdir, indexfile_path)
    return None
Example #16
    def install(self, spec, prefix):
        """Copy to installation prefix."""

        with working_dir(self.build_directory):
            install_tree('.', prefix)
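
Copying the whole build directory into the prefix is the usual pattern for software with no install target of its own. A minimal, hypothetical package class showing where such an install method lives (the class name, URLs and checksum are placeholders, and the import line varies between Spack versions):

from spack.package import *  # older Spack releases used 'from spack import *'


class Mytool(Package):
    """Hypothetical package installed by copying its source tree."""

    homepage = 'https://example.org/mytool'        # placeholder
    url = 'https://example.org/mytool-1.0.tar.gz'  # placeholder

    version('1.0', sha256='0' * 64)  # placeholder checksum

    def install(self, spec, prefix):
        # The stage's source directory is the working directory here, so '.'
        # copies the unpacked (and built) tree into the install prefix.
        install_tree('.', prefix)
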
Example #17
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)

    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                Gpg.verify('%s.asc' % specfile_path, specfile_path)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(str(e))
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = yaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']

    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    # if the original relative prefix and the new relative prefix differ, the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(workdir, allow_root)
    except Exception as e:
        shutil.rmtree(workdir)
        tty.die(str(e))
    # Delay creating spec.prefix until verification is complete
    # and any relocation has been done.
    else:
        install_tree(workdir, spec.prefix, symlinks=True)
    finally:
        shutil.rmtree(tmpdir)
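
Gpg.verify wraps the gpg command-line tool. A rough stand-in using subprocess, only to show what the verification step amounts to (Spack's real wrapper manages its own keyring and gpg home directory):

import subprocess


def verify_signature(signature_path, file_path):
    """Raise CalledProcessError unless 'signature_path' is a valid detached
    signature for 'file_path' (rough stand-in for Gpg.verify)."""
    subprocess.run(['gpg', '--verify', signature_path, file_path], check=True)
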
Example #18
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)

    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                Gpg.verify('%s.asc' % specfile_path, specfile_path)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(str(e))
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']

    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    # if the original relative prefix and the new relative prefix differ, the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(workdir, allow_root)
    except Exception as e:
        shutil.rmtree(workdir)
        tty.die(str(e))
    # Delay creating spec.prefix until verification is complete
    # and any relocation has been done.
    else:
        install_tree(workdir, spec.prefix, symlinks=True)
    finally:
        shutil.rmtree(tmpdir)
Example #19
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
                  allow_root=False, key=None, regenerate_index=False):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    if not spec.concrete:
        raise ValueError('spec must be concrete to build tarball')

    # set up some paths
    build_cache_dir = build_cache_directory(outdir)

    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(build_cache_dir,
                               tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    mkdirp(tarfile_dir)
    spackfile_path = os.path.join(
        build_cache_dir, tarball_path_name(spec, '.spack'))
    if os.path.exists(spackfile_path):
        if force:
            os.remove(spackfile_path)
        else:
            raise NoOverwriteException(str(spackfile_path))
    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(
        os.path.join(build_cache_dir, specfile_name))

    if os.path.exists(specfile_path):
        if force:
            os.remove(specfile_path)
        else:
            raise NoOverwriteException(str(specfile_path))
    # make a copy of the install directory to work with
    workdir = os.path.join(tempfile.mkdtemp(), os.path.basename(spec.prefix))
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    else:
        try:
            make_package_placeholder(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    spec_dict = {}
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(
        spec.prefix, spack.store.layout.root)
    spec_dict['buildinfo'] = buildinfo
    spec_dict['full_hash'] = spec.full_hash()

    tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
        spec_dict['full_hash'], spec.name, specfile_path))
    tty.debug(spec.tree())

    with open(specfile_path, 'w') as outfile:
        outfile.write(syaml.dump(spec_dict))

    # sign the tarball and spec file with gpg
    if not unsigned:
        sign_tarball(key, force, specfile_path)
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # create an index.html for the build_cache directory so specs can be found
    if regenerate_index:
        generate_package_index(build_cache_dir)

    return None