Example #1
def satisfyTarget(name, version_required, working_directory, update_installed=None):
    ''' returns a Target for the specified version (either to an already
        installed copy (from disk), or to a newly downloaded one), or None if
        the version could not be satisfied.

        update_installed = {None, 'Check', 'Update'}
            None:   prevent any attempt to look for new versions if the
                    target already exists
            Check:  check for new versions, and pass new version information to
                    the target object
            Update: replace any existing version with the newest available, if
                    the newest available has a higher version
    '''
    
    spec = None
    v = None
    
    target_path = os.path.join(working_directory, name)
    local_target = target.Target(
        target_path,
        installed_linked = fsutils.isLink(target_path),
        latest_suitable_version = v
    )
    
    if local_target and (local_target.installedLinked() or update_installed != 'Update' or not local_target.outdated()):
        # if a target exists (has a valid description file), and either is
        # not outdated, or we are not updating
        return local_target

    # if we need to check for latest versions, get the latest available version
    # before checking for a local target so that we can create the local
    # target with a handle to its latest available version
    if update_installed is None:
        logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))
        v = latestSuitableVersion(name, version_required, registry='targets')
    elif local_target and local_target.outdated():
        logger.info('%soutdated: %s@%s -> %s' % (
            ('update ' if update_installed == 'Update' else ''),
            name,
            local_target.getVersion(),
            v
        ))
        # must rm the old target before continuing
        fsutils.rmRf(target_path)

    if not v and update_installed is not None:
        v = latestSuitableVersion(name, version_required, registry='targets')

    if not v:
        raise access_common.TargetUnavailable(
            'Target "%s":"%s" is not a supported form.' % (name, version_required)
        )
    directory = os.path.join(working_directory, name)
    v.unpackInto(directory)
    r = target.Target(directory)
    if not r:
        raise Exception(
            'Dependency "%s":"%s" is not a valid target.' % (name, version_required)
        )
    return r
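A minimal usage sketch of the three update_installed modes described in the docstring above (the target name, version spec and working directory here are made up for illustration):

satisfyTarget('frdm-k64f-gcc', '*', './yotta_targets', update_installed=None)      # reuse an installed copy; only download if missing
satisfyTarget('frdm-k64f-gcc', '*', './yotta_targets', update_installed='Check')   # also attach latest-version info to the returned Target
satisfyTarget('frdm-k64f-gcc', '*', './yotta_targets', update_installed='Update')  # replace an outdated installed copy with the newest version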
Example #2
    def unpackInto(self, directory):
        logger.debug('unpack version %s from git repo %s to %s' % (self.version, self.working_copy.directory, directory))
        tag = self.tag
        fsutils.rmRf(directory)
        vcs.Git.cloneToDirectory(self.working_copy.directory, directory, tag)

        # remove temporary files created by the GitWorkingCopy clone
        self.working_copy.remove()
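A rough sketch of what the clone step amounts to, assuming vcs.Git.cloneToDirectory shells out to the git command line (a hypothetical stand-in, not yotta's actual helper):

import subprocess

def clone_at_tag(src_repo, dest_dir, tag=None):
    # shallow-clone the repository into dest_dir, checked out at the given tag if any
    opts = ['--branch', tag, '--depth', '1'] if tag else []
    subprocess.check_call(['git', 'clone'] + opts + [src_repo, dest_dir])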
Example #3
    def unpackInto(self, directory):
        logger.debug('unpack version %s from git repo %s to %s' %
                     (self.version, self.working_copy.directory, directory))
        tag = self.tag
        fsutils.rmRf(directory)
        vcs.Git.cloneToDirectory(self.working_copy.directory, directory, tag)

        # remove temporary files created by the GitWorkingCopy clone
        self.working_copy.remove()
Example #4
def satisfyVersionFromSearchPaths(name,
                                  version_required,
                                  search_paths,
                                  update=False,
                                  type='module'):
    ''' returns a Component/Target for the specified version, if found in the
        list of search paths. If `update' is True, then also check for newer
        versions of the found component, and update it in-place (unless it was
        installed via a symlink).
    '''
    v = None

    try:
        local_version = searchPathsFor(
            name,
            sourceparse.parseSourceURL(version_required).semanticSpec(),
            search_paths, type)
    except pack.InvalidDescription as e:
        logger.error(e)
        return None

    logger.debug("%s %s locally" %
                 (('found', 'not found')[not local_version], name))
    if local_version:
        if update and not local_version.installedLinked():
            #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))
            v = latestSuitableVersion(name,
                                      version_required,
                                      registry=_registryNamespaceForType(type))
            if local_version:
                local_version.setLatestAvailable(v)

        # if we don't need to update, then we're done
        if local_version.installedLinked() or not local_version.outdated():
            logger.debug("satisfy component from directory: %s" %
                         local_version.path)
            # if a component exists (has a valid description file), and either is
            # not outdated, or we are not updating
            if name != local_version.getName():
                raise Exception(
                    'Component %s found in incorrectly named directory %s (%s)'
                    % (local_version.getName(), name, local_version.path))
            return local_version

        # otherwise, we need to update the installed component
        logger.info('update outdated: %s@%s -> %s' %
                    (name, local_version.getVersion(), v))
        # must rm the old component before continuing
        fsutils.rmRf(local_version.path)
        return _satisfyVersionByInstallingVersion(name,
                                                  version_required,
                                                  local_version.path,
                                                  v,
                                                  type=type)
    return None
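A simplified sketch of the search-path lookup this function depends on (a hypothetical helper: it assumes each installed module lives at <search_path>/<name> with a JSON description file, which may not match yotta's real searchPathsFor):

import os
import json

def find_installed(name, search_paths, description_file='module.json'):
    # return (directory, parsed description) for the first match, or None
    for search_path in search_paths:
        directory = os.path.join(search_path, name)
        description = os.path.join(directory, description_file)
        if os.path.isfile(description):
            with open(description) as f:
                return directory, json.load(f)
    return None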
Example #5
    def unpackInto(self, directory):
        logger.debug("unpack version %s from hg repo %s to %s" % (self.version, self.working_copy.directory, directory))
        if self.isTip():
            tag = None
        else:
            tag = self.tag
        fsutils.rmRf(directory)
        vcs.HG.cloneToDirectory(self.working_copy.directory, directory, tag)

        # remove temporary files created by the HGWorkingCopy clone
        self.working_copy.remove()
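As with the git variant, a rough sketch of the Mercurial clone step, assuming vcs.HG.cloneToDirectory wraps the hg command line (hypothetical; tag=None clones at tip, matching the isTip() check above):

import subprocess

def hg_clone_at_tag(src_repo, dest_dir, tag=None):
    # -u/--updaterev checks the working directory out at the given tag
    opts = ['-u', tag] if tag else []
    subprocess.check_call(['hg', 'clone'] + opts + [src_repo, dest_dir])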
Example #6
    def unpackInto(self, directory):
        logger.debug('unpack version %s from hg repo %s to %s' %
                     (self.version, self.working_copy.directory, directory))
        if self.isTip():
            tag = None
        else:
            tag = self.tag
        fsutils.rmRf(directory)
        vcs.HG.cloneToDirectory(self.working_copy.directory, directory, tag)

        # remove temporary files created by the HGWorkingCopy clone
        self.working_copy.remove()
Example #7
File: access.py Project: geky/yotta
def satisfyVersionFromSearchPaths(name, version_required, search_paths, update=False, type='module', inherit_shrinkwrap=None):
    ''' returns a Component/Target for the specified version, if found in the
        list of search paths. If `update' is True, then also check for newer
        versions of the found component, and update it in-place (unless it was
        installed via a symlink).
    '''
    v    = None

    try:
        local_version = searchPathsFor(
            name,
            sourceparse.parseSourceURL(version_required).semanticSpec(),
            search_paths,
            type,
            inherit_shrinkwrap = inherit_shrinkwrap
        )
    except pack.InvalidDescription as e:
        logger.error(e)
        return None

    logger.debug("%s %s locally" % (('found', 'not found')[not local_version], name))
    if local_version:
        if update and not local_version.installedLinked():
            #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))
            v = latestSuitableVersion(name, version_required, registry=_registryNamespaceForType(type))
            if local_version:
                local_version.setLatestAvailable(v)

        # if we don't need to update, then we're done
        if local_version.installedLinked() or not local_version.outdated():
            logger.debug("satisfy component from directory: %s" % local_version.path)
            # if a component exists (has a valid description file), and either is
            # not outdated, or we are not updating
            if name != local_version.getName():
                raise Exception('Component %s found in incorrectly named directory %s (%s)' % (
                    local_version.getName(), name, local_version.path
                ))
            return local_version

        # otherwise, we need to update the installed component
        logger.info('update outdated: %s@%s -> %s' % (
            name,
            local_version.getVersion(),
            v
        ))
        # must rm the old component before continuing
        fsutils.rmRf(local_version.path)
        return _satisfyVersionByInstallingVersion(
            name, version_required, local_version.path, v, type=type, inherit_shrinkwrap=inherit_shrinkwrap
        )
    return None
Example #8
def unpackFrom(tar_file_path, to_directory):
    # first unpack into a sibling directory of the specified directory, and
    # then move it into place.

    # we expect our tarballs to contain a single top-level directory. We strip
    # off this name as we extract to minimise the path length

    into_parent_dir = os.path.dirname(to_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    try:
        with tarfile.open(tar_file_path) as tf:
            strip_dirname = ''
            # get the extraction directory name from the first part of the
            # extraction paths: it should be the same for all members of
            # the archive
            for m in tf.getmembers():
                split_path = fsutils.fullySplitPath(m.name)
                logger.debug('process member: %s %s', m.name, split_path)
                if os.path.isabs(m.name) or '..' in split_path:
                    raise ValueError('archive uses invalid paths')
                if not strip_dirname:
                    if len(split_path) != 1 or not len(split_path[0]):
                        raise ValueError(
                            'archive does not appear to contain a single module'
                        )
                    strip_dirname = split_path[0]
                    continue
                else:
                    if split_path[0] != strip_dirname:
                        raise ValueError(
                            'archive does not appear to contain a single module'
                        )
                m.name = os.path.join(*split_path[1:])
                tf.extract(m, path=temp_directory)
        # make sure the destination directory doesn't exist:
        fsutils.rmRf(to_directory)
        shutil.move(temp_directory, to_directory)
        temp_directory = None
        logger.debug('extraction complete %s', to_directory)
    except IOError as e:
        if e.errno != errno.ENOENT:
            logger.error('failed to extract tarfile %s', e)
            fsutils.rmF(tar_file_path)
        raise
    finally:
        if temp_directory is not None:
            # if anything has failed, cleanup
            fsutils.rmRf(temp_directory)
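A hypothetical call to unpackFrom, extracting a downloaded module tarball into its final location (the paths are made up). Extracting into a sibling temporary directory first, then moving it into place, means a failed or malicious archive never leaves a half-populated to_directory behind:

unpackFrom('/tmp/yotta-download/some-module.tar.gz',
           os.path.join('yotta_modules', 'some-module'))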
Example #9
def unpackFrom(tar_file_path, to_directory):
    # first unpack into a sibling directory of the specified directory, and
    # then move it into place.

    # we expect our tarballs to contain a single top-level directory. We strip
    # off this name as we extract to minimise the path length

    into_parent_dir = os.path.dirname(to_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    try:
        with tarfile.open(tar_file_path) as tf:
            strip_dirname = ''
            # get the extraction directory name from the first part of the
            # extraction paths: it should be the same for all members of
            # the archive
            for m in tf.getmembers():
                split_path = fsutils.fullySplitPath(m.name)
                logger.debug('process member: %s %s', m.name, split_path)
                if os.path.isabs(m.name) or '..' in split_path:
                    raise ValueError('archive uses invalid paths')
                if not strip_dirname:
                    if len(split_path) != 1 or not len(split_path[0]):
                        raise ValueError('archive does not appear to contain a single module')
                    strip_dirname = split_path[0]
                    continue
                else:
                    if split_path[0] != strip_dirname:
                        raise ValueError('archive does not appear to contain a single module')
                m.name = os.path.join(*split_path[1:])
                tf.extract(m, path=temp_directory)
        # make sure the destination directory doesn't exist:
        fsutils.rmRf(to_directory)
        shutil.move(temp_directory, to_directory)
        temp_directory = None
        logger.debug('extraction complete %s', to_directory)
    except IOError as e:
        if e.errno != errno.ENOENT:
            logger.error('failed to extract tarfile %s', e)
            fsutils.rmF(tar_file_path)
        raise
    finally:
        if temp_directory is not None:
            # if anything has failed, cleanup
            fsutils.rmRf(temp_directory)
Example #10
def satisfyVersionFromSearchPaths(name,
                                  version_required,
                                  search_paths,
                                  update=False):
    ''' returns a Component for the specified version, if found in the list of
        search paths. If `update' is True, then also check for newer versions
        of the found component, and update it in-place (unless it was installed
        via a symlink).
    '''
    spec = None
    v = None

    local_component = searchPathsForComponent(name, version_required,
                                              search_paths)
    logger.debug("%s %s locally" %
                 (('found', 'not found')[not local_component], name))
    if local_component:
        if update and not local_component.installedLinked():
            #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))
            v = latestSuitableVersion(name, version_required)
            if local_component:
                local_component.setLatestAvailable(v)

        # if we don't need to update, then we're done
        if local_component.installedLinked() or not local_component.outdated():
            logger.debug("satisfy component from directory: %s" %
                         local_component.path)
            # if a component exists (has a valid description file), and either is
            # not outdated, or we are not updating
            if name != local_component.getName():
                raise Exception(
                    'Component %s found in incorrectly named directory %s (%s)'
                    % (local_component.getName(), name, local_component.path))
            return local_component

        # otherwise, we need to update the installed component
        logger.info('update outdated: %s@%s -> %s' %
                    (name, local_component.getVersion(), v))
        # must rm the old component before continuing
        fsutils.rmRf(local_component.path)
        return _satisfyVersionByInstallingVersion(name, version_required,
                                                  local_component.path, v)
    return None
Example #11
def satisfyVersionFromSearchPaths(name, version_required, search_paths, update=False):
    ''' returns a Component for the specified version, if found in the list of
        search paths. If `update' is True, then also check for newer versions
        of the found component, and update it in-place (unless it was installed
        via a symlink).
    '''
    spec = None
    v    = None

    local_component = searchPathsForComponent(name, version_required, search_paths)
    logger.debug("%s %s locally" % (('found', 'not found')[not local_component], name))
    if local_component:
        if update and not local_component.installedLinked():
            #logger.debug('attempt to check latest version of %s @%s...' % (name, version_required))
            v = latestSuitableVersion(name, version_required)
            if local_component:
                local_component.setLatestAvailable(v)

        # if we don't need to update, then we're done
        if local_component.installedLinked() or not local_component.outdated():
            logger.debug("satisfy component from directory: %s" % local_component.path)
            # if a component exists (has a valid description file), and either is
            # not outdated, or we are not updating
            if name != local_component.getName():
                raise Exception('Component %s found in incorrectly named directory %s (%s)' % (
                    local_component.getName(), name, local_component.path
                ))
            return local_component
        
        # otherwise, we need to update the installed component
        logger.info('update outdated: %s@%s -> %s' % (
            name,
            local_component.getVersion(),
            v
        ))
        # must rm the old component before continuing
        fsutils.rmRf(local_component.path)
        return _satisfyVersionByInstallingVersion(name, version_required, local_component.path, v)
    return None
Example #12
    def remove(self):
        fsutils.rmRf(self.worktree)
Example #14
def satisfyTarget(name,
                  version_required,
                  working_directory,
                  update_installed=None):
    ''' returns a Target for the specified version (either to an already
        installed copy (from disk), or to a newly downloaded one), or None if
        the version could not be satisfied.

        update_installed = {None, 'Check', 'Update'}
            None:   prevent any attempt to look for new versions if the
                    target already exists
            Check:  check for new versions, and pass new version information to
                    the target object
            Update: replace any existing version with the newest available, if
                    the newest available has a higher version
    '''

    spec = None
    v = None

    target_path = os.path.join(working_directory, name)
    local_target = target.Target(target_path,
                                 installed_linked=fsutils.isLink(target_path),
                                 latest_suitable_version=v)

    if local_target and (local_target.installedLinked()
                         or update_installed != 'Update'
                         or not local_target.outdated()):
        # if a target exists (has a valid description file), and either is
        # not outdated, or we are not updating
        return local_target

    # if we need to check for latest versions, get the latest available version
    # before checking for a local target so that we can create the local
    # target with a handle to its latest available version
    if update_installed is None:
        logger.debug('attempt to check latest version of %s @%s...' %
                     (name, version_required))
        v = latestSuitableVersion(name, version_required, registry='targets')
    elif local_target and local_target.outdated():
        logger.info('%soutdated: %s@%s -> %s' %
                    (('update ' if update_installed == 'Update' else ''), name,
                     local_target.getVersion(), v))
        # must rm the old target before continuing
        fsutils.rmRf(target_path)

    if not v and update_installed is not None:
        v = latestSuitableVersion(name, version_required, registry='targets')

    if not v:
        raise access_common.TargetUnavailable(
            '"%s" is not a supported specification for a target (the target is %s)'
            % (version_required, name))
    directory = os.path.join(working_directory, name)
    v.unpackInto(directory)
    r = target.Target(directory)
    if not r:
        raise Exception(
            '"%s":"%s" is not a valid target (its description file is invalid)'
            % (name, version_required))
    return r
Example #15
def unpackTarballStream(stream, into_directory, hash=(None, None)):
    ''' Unpack a stream-like object that contains a tarball into a directory
    '''
    hash_name = hash[0]
    hash_value = hash[1]

    if hash_name:
        m = getattr(hashlib, hash_name)()

    into_parent_dir = os.path.dirname(into_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    download_fname = os.path.join(temp_directory, 'download.tar.gz')
    # remove any partially downloaded file: TODO: checksumming & caching of
    # downloaded components in some central place
    fsutils.rmF(download_fname)
    # create the archive exclusively, we don't want someone else maliciously
    # overwriting our tar archive with something that unpacks to an absolute
    # path when we might be running sudo'd
    try:
        fd = os.open(
            download_fname,
            os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, "O_BINARY", 0))
        with os.fdopen(fd, 'rb+') as f:
            f.seek(0)

            for chunk in stream.iter_content(1024):
                f.write(chunk)
                if hash_name:
                    m.update(chunk)

            if hash_name:
                calculated_hash = m.hexdigest()
                logging.debug('calculated hash: %s check against: %s' %
                              (calculated_hash, hash_value))
                if hash_value and (hash_value != calculated_hash):
                    raise Exception('Hash verification failed.')
            f.truncate()
            logging.debug('got file, extract into %s (for %s)', temp_directory,
                          into_directory)
            # head back to the start of the file and untar (without closing the
            # file)
            f.seek(0)
            f.flush()
            os.fsync(f)
            with tarfile.open(fileobj=f) as tf:
                to_extract = []
                # modify members to change where they extract to!
                for m in tf.getmembers():
                    split_path = fsutils.fullySplitPath(m.name)
                    if len(split_path) > 1:
                        m.name = os.path.join(*(split_path[1:]))
                        to_extract.append(m)
                tf.extractall(path=temp_directory, members=to_extract)

        # remove the temporary download file, maybe in the future we will cache
        # these somewhere
        fsutils.rmRf(os.path.join(into_directory, 'download.tar.gz'))

        # move the directory we extracted stuff into to where we actually want it
        # to be
        fsutils.rmRf(into_directory)
        shutil.move(temp_directory, into_directory)

    finally:
        fsutils.rmRf(temp_directory)

    logging.debug('extraction complete %s', into_directory)
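The streaming hash check in the download loop above can be isolated into a small sketch, assuming a requests-style response object with iter_content() and the same (hash_name, hash_value) convention:

import hashlib

def digest_stream(stream, hash_name, chunk_size=1024):
    # hash the stream chunk by chunk, mirroring the loop that writes the tarball
    m = getattr(hashlib, hash_name)()
    for chunk in stream.iter_content(chunk_size):
        m.update(chunk)
    return m.hexdigest()

The check only runs when an expected hash_value was supplied; a mismatch raises an exception instead of keeping the download.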
Example #16
def unpackTarballStream(stream, into_directory, hash=(None, None)):
    ''' Unpack a stream-like object that contains a tarball into a directory
    '''
    hash_name = hash[0]
    hash_value = hash[1]

    if hash_name:
        m = getattr(hashlib, hash_name)()

    into_parent_dir = os.path.dirname(into_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    download_fname = os.path.join(temp_directory, 'download.tar.gz')
    # remove any partially downloaded file: TODO: checksumming & caching of
    # downloaded components in some central place
    fsutils.rmF(download_fname)
    # create the archive exclusively, we don't want someone else maliciously
    # overwriting our tar archive with something that unpacks to an absolute
    # path when we might be running sudo'd
    try:
        fd = os.open(download_fname, os.O_CREAT | os.O_EXCL |
                                     os.O_RDWR | getattr(os, "O_BINARY", 0))
        with os.fdopen(fd, 'rb+') as f:
            f.seek(0)
            
            for chunk in stream.iter_content(1024):
                f.write(chunk)
                if hash_name:
                    m.update(chunk)

            if hash_name:
                calculated_hash = m.hexdigest()
                logging.debug(
                    'calculated hash: %s check against: %s' % (calculated_hash, hash_value))
                if hash_value and (hash_value != calculated_hash):
                    raise Exception('Hash verification failed.')
            f.truncate()
            logging.debug(
                'got file, extract into %s (for %s)', temp_directory, into_directory)
            # head back to the start of the file and untar (without closing the
            # file)
            f.seek(0)
            f.flush()
            os.fsync(f)
            with tarfile.open(fileobj=f) as tf:
                to_extract = []
                # modify members to change where they extract to!
                for m in tf.getmembers():
                    split_path = fsutils.fullySplitPath(m.name)
                    if len(split_path) > 1:
                        m.name = os.path.join(*(split_path[1:]))
                        to_extract.append(m)
                tf.extractall(path=temp_directory, members=to_extract)

        # remove the temporary download file, maybe in the future we will cache
        # these somewhere
        fsutils.rmRf(os.path.join(into_directory, 'download.tar.gz'))

        # move the directory we extracted stuff into to where we actually want it
        # to be
        fsutils.rmRf(into_directory)
        shutil.move(temp_directory, into_directory)

    finally:
        fsutils.rmRf(temp_directory)

    logging.debug('extraction complete %s', into_directory)
Example #17
def unpackTarballStream(stream, into_directory, hash=(None, None)):
    ''' Unpack a responses stream that contains a tarball into a directory
    '''
    hash_name = hash[0]
    hash_value = hash[1]

    if hash_name:
        m = getattr(hashlib, hash_name)()

    into_parent_dir = os.path.dirname(into_directory)
    fsutils.mkDirP(into_parent_dir)
    temp_directory = tempfile.mkdtemp(dir=into_parent_dir)
    download_fname = os.path.join(temp_directory, 'download.tar.gz')
    # remove any partially downloaded file: TODO: checksumming & caching of
    # downloaded components in some central place
    fsutils.rmF(download_fname)
    # create the archive exclusively, we don't want someone else maliciously
    # overwriting our tar archive with something that unpacks to an absolute
    # path when we might be running sudo'd
    try:
        with _openExclusively(download_fname) as f:
            f.seek(0)
            for chunk in stream.iter_content(1024):
                f.write(chunk)
                if hash_name:
                    m.update(chunk)

            if hash_name:
                calculated_hash = m.hexdigest()
                logger.debug(
                    'calculated %s hash: %s check against: %s' % (
                        hash_name, calculated_hash, hash_value
                    )
                )
                if hash_value and (hash_value != calculated_hash):
                    raise Exception('Hash verification failed.')
            logger.debug('wrote tarfile of size: %s to %s', f.tell(), download_fname)
            f.truncate()
            logger.debug(
                'got file, extract into %s (for %s)', temp_directory, into_directory
            )
            # head back to the start of the file and untar (without closing the
            # file)
            f.seek(0)
            f.flush()
            os.fsync(f)
            with tarfile.open(fileobj=f) as tf:
                extracted_dirname = ''
                # get the extraction directory name from the first part of the
                # extraction paths: it should be the same for all members of
                # the archive
                for m in tf.getmembers():
                    split_path = fsutils.fullySplitPath(m.name)
                    if len(split_path) > 1:
                        if extracted_dirname:
                            if split_path[0] != extracted_dirname:
                                raise ValueError('archive does not appear to contain a single module')
                        else:
                            extracted_dirname = split_path[0]
                tf.extractall(path=temp_directory)

        # move the directory we extracted stuff into to where we actually want it
        # to be
        fsutils.rmRf(into_directory)
        shutil.move(os.path.join(temp_directory, extracted_dirname), into_directory)

    finally:
        fsutils.rmF(download_fname)
        fsutils.rmRf(temp_directory)

    logger.debug('extraction complete %s', into_directory)
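Examples #15 and #16 inline the exclusive-create open that this version wraps in a helper; a plausible reconstruction of _openExclusively based on that inline code (hypothetical, but it matches what the earlier examples do):

import os

def _openExclusively(path):
    # O_EXCL makes the create fail if the file already exists, so nobody can
    # pre-place a file for us to overwrite while we may be running sudo'd
    fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR | getattr(os, 'O_BINARY', 0))
    return os.fdopen(fd, 'rb+')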