Example 1
    def archive(self, destination):
        """Just moves this archive to the destination."""
        if not self.archive_file:
            raise NoArchiveFileError("Cannot call archive() before fetching.")

        if not extension(destination) == extension(self.archive_file):
            raise ValueError("Cannot archive without matching extensions.")

        shutil.move(self.archive_file, destination)
Example 2
    def archive(self, destination):
        """Just moves this archive to the destination."""
        if not self.archive_file:
            raise NoArchiveFileError("Cannot call archive() before fetching.")

        if not extension(destination) == extension(self.archive_file):
            raise ValueError("Cannot archive without matching extensions.")

        shutil.move(self.archive_file, destination)
Example 3
File: url.py Project: wangvsa/spack
def split_url_extension(path):
    """Some URLs have a query string, e.g.:

    1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
    2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
    3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0

    In (1), the query string needs to be stripped to get at the
    extension, but in (2) & (3), the filename is IN a single final query
    argument.

    This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
    The suffix contains anything that was stripped off the URL to
    get at the file extension.  In (1), it will be ``'?raw=true'``, but
    in (2), it will be empty. In (3) the suffix is a parameter that follows
    after the file extension, e.g.:

    1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
    2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
    3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
    """
    prefix, ext, suffix = path, '', ''

    # Strip off sourceforge download suffix.
    # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
    match = re.search(r'(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$', path)
    if match:
        prefix, suffix = match.groups()

    ext = comp.extension(prefix)
    if ext is not None:
        prefix = comp.strip_extension(prefix)

    else:
        prefix, suf = strip_query_and_fragment(prefix)
        ext = comp.extension(prefix)
        prefix = comp.strip_extension(prefix)
        suffix = suf + suffix
        if ext is None:
            ext = ''

    return prefix, ext, suffix
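For reference, a short illustration of the return values this docstring documents; the expected pieces below are restated from the docstring above (cases 1 and 3), not independently verified, and the calls are only a sketch of how the helper would be invoked:

# Sketch: docstring case (1) -- a query string that must be stripped off.
prefix, ext, suffix = split_url_extension(
    'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
# prefix -> 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7'
# ext    -> '.tgz'
# suffix -> '?raw=true'

# Sketch: docstring case (3) -- a parameter that follows the file extension.
prefix, ext, suffix = split_url_extension(
    'https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0')
# prefix -> 'https://gitlab.kitware.com/vtk/vtk/repository/archive'
# ext    -> '.tar.bz2'
# suffix -> '?ref=v7.0.0'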
Example 4
File: url.py Project: LLNL/spack
def split_url_extension(path):
    """Some URLs have a query string, e.g.:

    1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
    2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
    3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0

    In (1), the query string needs to be stripped to get at the
    extension, but in (2) & (3), the filename is IN a single final query
    argument.

    This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
    The suffix contains anything that was stripped off the URL to
    get at the file extension.  In (1), it will be ``'?raw=true'``, but
    in (2), it will be empty. In (3) the suffix is a parameter that follows
    after the file extension, e.g.:

    1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
    2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
    3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
    """
    prefix, ext, suffix = path, '', ''

    # Strip off sourceforge download suffix.
    # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
    match = re.search(r'(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$', path)
    if match:
        prefix, suffix = match.groups()

    ext = comp.extension(prefix)
    if ext is not None:
        prefix = comp.strip_extension(prefix)

    else:
        prefix, suf = strip_query_and_fragment(prefix)
        ext = comp.extension(prefix)
        prefix = comp.strip_extension(prefix)
        suffix = suf + suffix
        if ext is None:
            ext = ''

    return prefix, ext, suffix
Example 5
    def stage(self):
        if not self.spec.concrete:
            raise ValueError("Can only get a stage for a concrete package.")

        if self._stage is None:
            # TODO: move this logic into a mirror module.
            mirror_path = "%s/%s" % (self.name, "%s-%s.%s" % (
                self.name, self.version, extension(self.url)))
            self._stage = Stage(
                self.url, mirror_path=mirror_path, name=self.spec.short_spec)
        return self._stage
Example 6
def test_native_unpacking(tmpdir_factory, archive_file):
    extension = scomp.extension(archive_file)
    util = scomp.decompressor_for(archive_file, extension)
    tmpdir = tmpdir_factory.mktemp("comp_test")
    with working_dir(str(tmpdir)):
        assert not os.listdir(os.getcwd())
        util(archive_file)
        files = os.listdir(os.getcwd())
        assert len(files) == 1
        with open(files[0], 'r') as f:
            contents = f.read()
        assert 'TEST' in contents
Example 7
File: url.py Project: trws/spack
def split_url_extension(path):
    """Some URLs have a query string, e.g.:

          1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
          2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz

       In (1), the query string needs to be stripped to get at the
       extension, but in (2), the filename is IN a single final query
       argument.

       This strips the URL into three pieces: prefix, ext, and suffix.
       The suffix contains anything that was stripped off the URL to
       get at the file extension.  In (1), it will be '?raw=true', but
       in (2), it will be empty. e.g.:

           1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')
           2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin',
               '.tar.gz', None)
    """
    prefix, ext, suffix = path, '', ''

    # Strip off sourceforge download suffix.
    match = re.search(r'((?:sourceforge.net|sf.net)/.*)(/download)$', path)
    if match:
        prefix, suffix = match.groups()

    ext = comp.extension(prefix)
    if ext is not None:
        prefix = comp.strip_extension(prefix)

    else:
        prefix, suf = strip_query_and_fragment(prefix)
        ext = comp.extension(prefix)
        prefix = comp.strip_extension(prefix)
        suffix = suf + suffix
        if ext is None:
            ext = ''

    return prefix, ext, suffix
Example 8
File: url.py Project: rorist/spack
def split_url_extension(path):
    """Some URLs have a query string, e.g.:

          1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
          2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz

       In (1), the query string needs to be stripped to get at the
       extension, but in (2), the filename is IN a single final query
       argument.

       This strips the URL into three pieces: prefix, ext, and suffix.
       The suffix contains anything that was stripped off the URL to
       get at the file extension.  In (1), it will be '?raw=true', but
       in (2), it will be empty. e.g.:

           1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')
           2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin',
               '.tar.gz', None)
    """
    prefix, ext, suffix = path, '', ''

    # Strip off sourceforge download suffix.
    match = re.search(r'((?:sourceforge.net|sf.net)/.*)(/download)$', path)
    if match:
        prefix, suffix = match.groups()

    ext = comp.extension(prefix)
    if ext is not None:
        prefix = comp.strip_extension(prefix)

    else:
        prefix, suf = strip_query_and_fragment(prefix)
        ext = comp.extension(prefix)
        prefix = comp.strip_extension(prefix)
        suffix = suf + suffix
        if ext is None:
            ext = ''

    return prefix, ext, suffix
Example 9
def mirror_archive_filename(spec):
    """Get the path that this spec will live at within a mirror."""
    if not spec.version.concrete:
        raise ValueError("mirror.path requires spec with concrete version.")

    fetcher = spec.package.fetcher
    if isinstance(fetcher, fs.URLFetchStrategy):
        # If we fetch this version with a URLFetchStrategy, use URL's archive type
        ext = extension(fetcher.url)
    else:
        # Otherwise we'll make a .tar.gz ourselves
        ext = 'tar.gz'

    return "%s-%s.%s" % (spec.package.name, spec.version, ext)
Example 10
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, basestring):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        self.stage.chdir()
        tar('-czf', destination, os.path.basename(self.stage.source_path))
Example 11
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, basestring):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        self.stage.chdir()
        tar('-czf', destination, os.path.basename(self.stage.source_path))
Example 12
    def expand(self):
        if not self.expand_archive:
            tty.msg("Skipping expand step for %s" % self.archive_file)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

        if not self.archive_file:
            raise NoArchiveFileError(
                "Couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
            self.extension = extension(self.archive_file)

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")

        mkdirp(tarball_container)
        with working_dir(tarball_container):
            decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand
        # to a single directory.  If the tarball *didn't* explode,
        # move contents up & remove the container directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata
        # in hidden files, which can end up *alongside* a single
        # top-level directory.  We ignore hidden files to accommodate
        # these "semi-exploding" tarballs.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            expanded_dir = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(expanded_dir):
                for f in files:
                    shutil.move(os.path.join(tarball_container, f),
                                os.path.join(self.stage.path, f))
                os.rmdir(tarball_container)

        if not files:
            os.rmdir(tarball_container)
Example 13
    def expand(self):
        if not self.expand_archive:
            tty.msg("Skipping expand step for %s" % self.archive_file)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

        self.stage.chdir()
        if not self.archive_file:
            raise NoArchiveFileError(
                "Couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
            self.extension = extension(self.archive_file)
        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")
        mkdirp(tarball_container)
        os.chdir(tarball_container)
        decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand
        # to a single directory.  If the tarball *didn't* explode,
        # move contents up & remove the container directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata
        # in hidden files, which can end up *alongside* a single
        # top-level directory.  We ignore hidden files to accommodate
        # these "semi-exploding" tarballs.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            expanded_dir = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(expanded_dir):
                for f in files:
                    shutil.move(os.path.join(tarball_container, f),
                                os.path.join(self.stage.path, f))
                os.rmdir(tarball_container)
        if not files:
            os.rmdir(tarball_container)
        # Set the wd back to the stage when done.
        self.stage.chdir()
Example 14
    def stage(self):
        if not self.spec.concrete:
            raise ValueError("Can only get a stage for a concrete package.")

        if self._stage is None:
            if not self.url:
                raise PackageVersionError(self.version)

            # TODO: move this logic into a mirror module.
            mirror_path = "%s/%s" % (
                self.name, "%s-%s.%s" %
                (self.name, self.version, extension(self.url)))

            self._stage = Stage(self.url,
                                mirror_path=mirror_path,
                                name=self.spec.short_spec)
        return self._stage
Example 15
    def archive(self, destination, **kwargs):
        assert (extension(destination) == 'tar.gz')
        assert (self.stage.source_path.startswith(self.stage.path))

        tar = which('tar', required=True)

        patterns = kwargs.get('exclude', None)
        if patterns is not None:
            if isinstance(patterns, string_types):
                patterns = [patterns]
            for p in patterns:
                tar.add_default_arg('--exclude=%s' % p)

        with working_dir(self.stage.path):
            if self.stage.srcdir:
                # Here we create an archive with the default repository name.
                # The 'tar' command has options for changing the name of a
                # directory that is included in the archive, but they differ
                # based on OS, so we temporarily rename the repo
                with temp_rename(self.stage.source_path, self.stage.srcdir):
                    tar('-czf', destination, self.stage.srcdir)
            else:
                tar('-czf', destination,
                    os.path.basename(self.stage.source_path))
Example 16
File: url.py Project: rorist/spack
def substitution_offsets(path):
    """This returns offsets for substituting versions and names in the provided path.
       It is a helper for substitute_version().
    """
    # Get name and version offsets
    try:
        ver, vs, vl = parse_version_offset(path)
        name, ns, nl = parse_name_offset(path, ver)
    except UndetectableNameError, e:
        return (None, -1, -1, (), ver, vs, vl, (vs, ))
    except UndetectableVersionError, e:
        return (None, -1, -1, (), None, -1, -1, ())

    # protect extensions like bz2 from getting inadvertently
    # considered versions.
    ext = comp.extension(path)
    path = comp.strip_extension(path)

    # Construct a case-insensitive regular expression for the package name.
    name_re = '(%s)' % insensitize(name)

    # Split the string apart by things that match the name so that if the
    # name contains numbers or things that look like versions, we don't
    # accidentally substitute them with a version.
    name_parts = re.split(name_re, path)

    offsets = cumsum(name_parts, 0, len)
    name_offsets = offsets[1::2]

    ver_offsets = []
    for i in xrange(0, len(name_parts), 2):
Example 17
def test_get_extension(archive):
    ext = scomp.extension(archive)
    assert ext_archive[ext] == archive
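test_get_extension relies on an archive fixture and an ext_archive mapping that are not part of this listing. A minimal sketch of how such a pair could be wired up with pytest (the file names and the fixture body are assumptions, not the project's actual test setup):

import pytest

# Hypothetical mapping from extension to an archive path; the real test
# module defines its own ext_archive, which is not shown here.
ext_archive = {
    'tar.gz': '/tmp/Foo.tar.gz',
    'zip': '/tmp/Foo.zip',
}

@pytest.fixture(params=sorted(ext_archive.values()))
def archive(request):
    # Each parametrization hands one archive path to a test run.
    return request.param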
Example 18
def test_get_bad_extension():
    archive = 'Foo.py'
    ext = scomp.extension(archive)
    assert ext is None
Example 19
                    % pkg.name)
            continue

        # create a subdir for the current package.
        pkg_path = join_path(args.directory, pkg.name)
        mkdirp(pkg_path)

        # Download all the tarballs using Stages, then move them into place
        for version in pkg.versions:
            # Skip versions that don't match the spec
            vspec = Spec('%s@%s' % (pkg.name, version))
            if not vspec.satisfies(spec):
                continue

            mirror_path = "%s/%s-%s.%s" % (
                pkg.name, pkg.name, version, extension(pkg.url))

            os.chdir(working_dir)
            mirror_file = join_path(args.directory, mirror_path)
            if os.path.exists(mirror_file):
                tty.msg("Already fetched %s." % mirror_file)
                num_mirrored += 1
                continue

            # Get the URL for the version and set up a stage to download it.
            url = pkg.url_for_version(version)
            stage = Stage(url)
            try:
                # fetch changes directory into the stage
                stage.fetch()
Example 20
File: url.py Project: trws/spack
def substitution_offsets(path):
    """This returns offsets for substituting versions and names in the provided path.
       It is a helper for substitute_version().
    """
    # Get name and version offsets
    try:
        ver,  vs, vl = parse_version_offset(path)
        name, ns, nl = parse_name_offset(path, ver)
    except UndetectableNameError, e:
        return (None, -1, -1, (), ver, vs, vl, (vs,))
    except UndetectableVersionError, e:
        return (None, -1, -1, (), None, -1, -1, ())

    # protect extensions like bz2 from getting inadvertently
    # considered versions.
    ext = comp.extension(path)
    path = comp.strip_extension(path)

    # Construct a case-insensitive regular expression for the package name.
    name_re = '(%s)' % insensitize(name)

    # Split the string apart by things that match the name so that if the
    # name contains numbers or things that look like versions, we don't
    # accidentally substitute them with a version.
    name_parts = re.split(name_re, path)

    offsets = cumsum(name_parts, 0, len)
    name_offsets = offsets[1::2]

    ver_offsets = []
    for i in xrange(0, len(name_parts), 2):
Example 21
                    pkg.name)
            continue

        # create a subdir for the current package.
        pkg_path = join_path(args.directory, pkg.name)
        mkdirp(pkg_path)

        # Download all the tarballs using Stages, then move them into place
        for version in pkg.versions:
            # Skip versions that don't match the spec
            vspec = Spec('%s@%s' % (pkg.name, version))
            if not vspec.satisfies(spec):
                continue

            mirror_path = "%s/%s-%s.%s" % (pkg.name, pkg.name, version,
                                           extension(pkg.url))

            os.chdir(working_dir)
            mirror_file = join_path(args.directory, mirror_path)
            if os.path.exists(mirror_file):
                tty.msg("Already fetched %s." % mirror_file)
                num_mirrored += 1
                continue

            # Get the URL for the version and set up a stage to download it.
            url = pkg.url_for_version(version)
            stage = Stage(url)
            try:
                # fetch changes directory into the stage
                stage.fetch()
Example 22
    def expand(self):
        if not self.expand_archive:
            tty.msg("Staging unexpanded archive %s in %s" %
                    (self.archive_file, self.stage.source_path))
            if not self.stage.expanded:
                mkdirp(self.stage.source_path)
            dest = os.path.join(self.stage.source_path,
                                os.path.basename(self.archive_file))
            # if the archive is a symlink itself, copy the target because
            # otherwise the symlink target might get modified by
            # staging-operations
            if os.path.islink(self.archive_file):
                shutil.copy(self.archive_file, dest)
            else:
                shutil.move(self.archive_file, dest)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

        if not self.archive_file:
            raise NoArchiveFileError(
                "Couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
            self.extension = extension(self.archive_file)

        if self.stage.expanded:
            tty.debug('Source already staged to %s' % self.stage.source_path)
            return

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")

        mkdirp(tarball_container)
        with working_dir(tarball_container):
            decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand to
        # a single directory.  If the tarball *didn't* explode, move its
        # contents to the staging source directory & remove the container
        # directory.  If the tarball did explode, just rename the tarball
        # directory to the staging source directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata in
        # hidden files, which can end up *alongside* a single top-level
        # directory.  We initially ignore presence of hidden files to
        # accommodate these "semi-exploding" tarballs but ensure the files
        # are copied to the source directory.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            src = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(src):
                self.stage.srcdir = non_hidden[0]
                shutil.move(src, self.stage.source_path)
                if len(files) > 1:
                    files.remove(non_hidden[0])
                    for f in files:
                        src = os.path.join(tarball_container, f)
                        dest = os.path.join(self.stage.path, f)
                        shutil.move(src, dest)
                os.rmdir(tarball_container)
            else:
                # This is a non-directory entry (e.g., a patch file) so simply
                # rename the tarball container to be the source path.
                shutil.move(tarball_container, self.stage.source_path)

        else:
            shutil.move(tarball_container, self.stage.source_path)