def __init__(self, specuri, authinfo=None):
    """Set up a build from a pspec URI, optionally with auth credentials."""
    if not isinstance(specuri, URI):
        specuri = URI(specuri)
    if authinfo:
        specuri.set_auth_info(authinfo)
    self.authinfo = authinfo

    # the spec file drives everything else, so load it first
    self.set_spec_file(specuri)

    # a remote spec is mirrored locally; a local one is used in place
    self.specdir = (self.fetch_files() if specuri.is_remote_file()
                    else dirname(self.specuri.get_uri()))

    self.sourceArchive = SourceArchive(self.spec, self.pkg_work_dir())
    self.set_environment_vars()
    self.actionLocals = self.actionGlobals = self.srcDir = None
def prepare_for_build(pspecfile, authInfo=None):
    """Build a PisiBuild for pspecfile, fetching remote sources first and
    reporting any unsatisfied build dependencies."""
    # FIXME: there is a function named "build" in this module which
    # makes it impossible to use build module directly.
    from build import PisiBuild

    url = URI(pspecfile)
    if url.is_remote_file():
        from sourcefetcher import SourceFetcher
        url.uri = SourceFetcher(url, authInfo).fetch_all()
    pb = PisiBuild(url.uri)

    # collect the build dependencies the installed system does not satisfy
    dep_unsatis = [dep for dep in pb.spec.source.buildDeps
                   if not dependency.installed_satisfies_dep(dep)]

    # FIXME: take care of the required buildDeps...
    # For now just report an error!
    if dep_unsatis:
        ctx.ui.error(_("Unsatisfied Build Dependencies:"))
        for dep in dep_unsatis:
            ctx.ui.warning(dep.package)
        # FIXME: Don't exit for now! It's annoying to test on a system that
        # doesn't has all packages made with pisi.
        # Will be enabled on the full-pisi system.
        # sys.exit(1)
    return pb
def set_spec_file(self, specuri):
    """Load the spec file at specuri and remember both the URI and contents."""
    if not specuri.is_remote_file():
        # FIXME: doesn't work for file://
        specuri = URI(os.path.realpath(specuri.get_uri()))
    self.specuri = specuri
    self.spec = SpecFile()
    self.spec.read(specuri, ctx.config.tmp_dir())
def set_spec_file(self, specuri):
    """Record specuri (canonicalized for local files) and parse it."""
    if not specuri.is_remote_file():
        # canonicalize local paths; FIXME: doesn't work for file://
        specuri = URI(os.path.realpath(specuri.get_uri()))
    self.specuri = specuri
    parsed = SpecFile()
    parsed.read(specuri, ctx.config.tmp_dir())
    self.spec = parsed
def __init__(self, packagefn, mode='r'):
    """Open a pisi package, downloading it first when packagefn is remote."""
    self.filepath = packagefn
    uri = URI(packagefn)
    if uri.is_remote_file():
        # presumably leaves a local copy and updates self.filepath — confirm
        self.fetch_remote_file(uri)
    self.impl = archive.ArchiveZip(self.filepath, 'zip', mode)
def install_single(pkg, upgrade=False):
    """install a single package from URI or ID"""
    url = URI(pkg)
    # A remote URL or an existing local path means a package file;
    # anything else is treated as a package name from the repository.
    is_package_file = url.is_remote_file() or os.path.exists(url.uri)
    if is_package_file:
        install_single_file(pkg, upgrade)
    else:
        install_single_name(pkg, upgrade)
def install_single(pkg, upgrade=False):
    """install a single package from URI or ID"""
    url = URI(pkg)
    if not (url.is_remote_file() or os.path.exists(url.uri)):
        # neither a URL nor a file on disk: resolve through the repository
        install_single_name(pkg, upgrade)
    else:
        install_single_file(pkg, upgrade)
def prepare_for_build(pspecfile, authInfo=None):
    """Return a Builder for pspecfile, fetching its sources first if remote."""
    url = URI(pspecfile)
    if url.is_remote_file():
        from sourcefetcher import SourceFetcher
        url.uri = SourceFetcher(url, authInfo).fetch_all()
    import pisi.build
    return pisi.build.Builder(url.uri)
def __init__(self, url, dest):
    """Prepare a download of url into the dest directory."""
    if not isinstance(url, URI):
        url = URI(url)
    self.url = url
    self.scheme = url.scheme()
    self.filedest = dest
    util.check_dir(self.filedest)
    # transfer statistics, updated as the download proceeds
    self.percent = 0
    self.rate = 0.0
    self.progress = None
    self.existsize = 0
def prepare_for_build(pspecfile, authInfo=None):
    """Fetch remote sources if needed and return a PisiBuild for pspecfile."""
    # FIXME: there is a function named "build" in this module which
    # makes it impossible to use build module directly.
    from build import PisiBuild
    url = URI(pspecfile)
    if url.is_remote_file():
        from sourcefetcher import SourceFetcher
        fetcher = SourceFetcher(url, authInfo)
        url.uri = fetcher.fetch_all()
    return PisiBuild(url.uri)
def __init__(self, url, destdir, resume=True):
    """Prepare an (optionally resumable) download of url into destdir."""
    if not isinstance(url, URI):
        url = URI(url)
    self.url = url
    self.scheme = url.scheme()
    self.resume = resume
    self.destdir = destdir
    util.check_dir(self.destdir)
    # progress bookkeeping
    self.eta = '??:??:??'
    self.percent = 0
    self.rate = 0.0
    self.progress = None
    self.exist_size = 0
def add_repo(name, indexuri, at=None):
    """Register a new repository under name, pointing at indexuri."""
    if ctx.repodb.has_repo(name):
        raise Error(_('Repo %s already present.') % name)
    repo = db.repo.Repo(URI(indexuri))
    ctx.repodb.add_repo(name, repo, at=at)
    ctx.ui.info(_('Repo %s added to system.') % name)
def read_uri(self, filename, repo=None):
    """Read PSPEC file"""
    self.filepath = filename
    uri = URI(filename)
    if uri.is_remote_file():
        from fetcher import fetch_url
        # remote indexes are cached under the repo's index directory
        assert repo
        destdir = os.path.join(ctx.config.index_dir(), repo)
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        fetch_url(uri, destdir, ctx.ui.Progress)
        self.filepath = os.path.join(destdir, uri.filename())
    self.read(self.filepath)
def __init__(self, bctx):
    """Capture archive metadata from the build context's spec."""
    source = bctx.spec.source
    self.url = URI(source.archiveUri)
    self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
    self.archiveName = source.archiveName
    self.archiveType = source.archiveType
    self.archiveSHA1 = source.archiveSHA1
    self.bctx = bctx
def make_uri(uri):
    """Coerce *uri* to a URI instance.

    Accepts either a string (byte or unicode) or an existing URI;
    raises Error for anything else.
    """
    # idiom fix: isinstance() over `type(x) == types.StringType`, which
    # also handles str subclasses and drops the `types` module dependency
    if isinstance(uri, (str, unicode)):
        uri = URI(uri)
    elif not isinstance(uri, URI):
        raise Error(_("uri must have type either URI or string"))
    return uri
class SourceArchive: """source archive. this is a class responsible for fetching and unpacking a source archive""" def __init__(self, spec, pkg_work_dir): self.url = URI(spec.source.archive.uri) self.pkg_work_dir = pkg_work_dir self.archiveFile = join(ctx.config.archives_dir(), self.url.filename()) self.archive = spec.source.archive def fetch(self, interactive=True): if not self.is_cached(interactive): if interactive: progress = ctx.ui.Progress else: progress = None try: fetch_url(self.url, ctx.config.archives_dir(), progress) except pisi.fetcher.FetchError: # if archive can not be reached from the url, try the fallback # address. if ctx.config.values.build.fallback: archive = basename(self.url.get_uri()) src = join(ctx.config.values.build.fallback, archive) fetch_url(src, ctx.config.archives_dir(), progress) else: raise def is_cached(self, interactive=True): if not access(self.archiveFile, R_OK): return False # check hash if util.check_file_hash(self.archiveFile, self.archive.sha1sum): if interactive: ctx.ui.info(_('%s [cached]') % self.archive.name) return True return False def unpack(self, clean_dir=True): # check archive file's integrity if not util.check_file_hash(self.archiveFile, self.archive.sha1sum): raise Error, _("unpack: check_file_hash failed") archive = Archive(self.archiveFile, self.archive.type) archive.unpack(self.pkg_work_dir, clean_dir)
def rebuild_repo(repo): ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo) if ctx.repodb.has_repo(repo): repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri()) indexname = repouri.filename() index = Index() indexpath = pisi.util.join_path(ctx.config.index_dir(), repo, indexname) tmpdir = os.path.join(ctx.config.tmp_dir(), 'index') pisi.util.clean_dir(tmpdir) pisi.util.check_dir(tmpdir) try: index.read_uri(indexpath, tmpdir, force=True) # don't look for sha1sum there except IOError, e: ctx.ui.warning(_("Input/Output error while reading %s: %s") % (indexpath, unicode(e))) return ctx.txn_proc(lambda txn : index.update_db(repo, txn=txn))
def __init__(self, packagefn, mode='r'):
    """Open a package file, fetching it into the package cache if remote."""
    self.filepath = packagefn
    url = URI(packagefn)
    if url.is_remote_file():
        from fetcher import fetch_url
        dest = ctx.config.packages_dir()
        self.filepath = join(dest, url.filename())
        # FIXME: exists is not enough, also sha1sum check needed \
        # when implemented in pisi-index.xml
        if exists(self.filepath):
            ctx.ui.info(_('%s [cached]') % url.filename())
        else:
            fetch_url(url, dest, ctx.ui.Progress)
    self.impl = archive.ArchiveZip(self.filepath, 'zip', mode)
def __init__(self, url, destdir, resume=True):
    """Prepare a download of url into destdir, honoring the authinfo option."""
    if not isinstance(url, URI):
        url = URI(url)
    if ctx.config.get_option("authinfo"):
        url.set_auth_info(ctx.config.get_option("authinfo"))
    self.url = url
    self.scheme = url.scheme()
    self.resume = resume
    self.destdir = destdir
    util.check_dir(self.destdir)
    # progress state, filled in during the transfer
    self.eta = '??:??:??'
    self.percent = 0
    self.rate = 0.0
    self.progress = None
    self.exist_size = 0
def check_signature(uri, transfer_dir, sign=detached):
    """Verify the detached GPG signature of uri.

    Downloads ``uri + '.sig'`` into transfer_dir and runs gpg over it.
    Raises NoSignatureFound when the .sig file cannot be obtained and
    InvalidSignature when gpg rejects it. No-op for non-detached modes.

    NOTE(review): the default ``sign=detached`` refers to a module-level
    name; presumably the same value as File.detached — confirm.
    """
    if sign != File.detached:
        return
    try:
        sigfilename = File.download(URI(uri + '.sig'), transfer_dir)
    except Exception:
        # fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; any download failure means "no signature"
        raise NoSignatureFound(uri)
    # HACK: sigfilename is interpolated into a shell command line; a crafted
    # file name could inject shell syntax — prefer subprocess with an
    # argument list (shell=False) here.
    if os.system('gpg --verify ' + sigfilename) != 0:
        raise InvalidSignature(uri)
def __init__(self, url, destdir, resume=True):
    """Set up a fetch of url into destdir; picks up auth info from options."""
    if not isinstance(url, URI):
        url = URI(url)
    if ctx.config.get_option("authinfo"):
        url.set_auth_info(ctx.config.get_option("authinfo"))
    self.resume = resume
    self.url = url
    self.scheme = url.scheme()
    self.destdir = destdir
    util.check_dir(self.destdir)
    # download progress fields
    self.eta = '??:??:??'
    self.percent = 0
    self.rate = 0.0
    self.progress = None
    self.exist_size = 0
def formatRequest(self, request):
    """Decorate a urllib2 request with auth, Range, and proxy settings.

    Adds Basic authorization when the URL carries credentials, a byte
    Range header when resuming a partial download, and installs a proxy
    opener when one is configured for the URL's scheme. Returns the
    (mutated) request.
    """
    if self.url.auth_info():
        enc = encodestring('%s:%s' % self.url.auth_info())
        request.add_header('Authorization', 'Basic %s' % enc)

    range_handlers = {'http': HTTPRangeHandler,
                      'https': HTTPRangeHandler,
                      'ftp': FTPRangeHandler}
    if self.exist_size and self.scheme in range_handlers:
        opener = urllib2.build_opener(range_handlers.get(self.scheme)())
        urllib2.install_opener(opener)
        # resume from the bytes we already have on disk
        request.add_header('Range', 'bytes=%d-' % self.exist_size)

    proxy_handler = None
    if ctx.config.values.general.http_proxy and self.url.scheme() == "http":
        http_proxy = ctx.config.values.general.http_proxy
        proxy_handler = urllib2.ProxyHandler(
            {URI(http_proxy).scheme(): http_proxy})
    elif ctx.config.values.general.https_proxy and self.url.scheme() == "https":
        https_proxy = ctx.config.values.general.https_proxy
        # fix: ProxyHandler keys must be scheme strings; a URI object key
        # would never match and the proxy was silently ignored
        proxy_handler = urllib2.ProxyHandler(
            {URI(https_proxy).scheme(): https_proxy})
    elif ctx.config.values.general.ftp_proxy and self.url.scheme() == "ftp":
        ftp_proxy = ctx.config.values.general.ftp_proxy
        # fix: this branch referenced the undefined name `http_proxy`
        # (NameError at runtime) and also used a URI object as the key
        proxy_handler = urllib2.ProxyHandler(
            {URI(ftp_proxy).scheme(): ftp_proxy})

    if proxy_handler:
        ctx.ui.info(_("Proxy configuration has been found for '%s' protocol")
                    % self.url.scheme())
        opener = urllib2.build_opener(proxy_handler)
        urllib2.install_opener(opener)

    return request
def from_name(name, authinfo=None):
    """Locate source package *name* in the repositories and return a Builder.

    Raises Error when no active repository carries the source.
    """
    # find package in repository
    sf, reponame = ctx.sourcedb.get_spec_repo(name)
    src = sf.source
    if not src:
        raise Error(_("Source %s not found in any active repository.") % name)
    src_uri = URI(src.sourceURI)
    if src_uri.is_absolute_path():
        # fix: this branch assigned `pkg_path`, leaving `src_path`
        # undefined at the return below (NameError for absolute URIs)
        src_path = str(src_uri)
    else:
        repo = ctx.repodb.get_repo(reponame)
        src_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                str(src_uri.path()))
    ctx.ui.debug(_("Source URI: %s") % src_path)
    return Builder(src_path, authinfo)
def download(uri, transfer_dir="/tmp", sha1sum=False, compress=None, sign=None, copylocal=False):
    """Fetch uri into transfer_dir and return the local file path.

    Remote files are downloaded; local files are copied when copylocal or
    when not writable in place. With sha1sum=True a companion ``.sha1sum``
    file is fetched and the payload verified against it; raises
    AlreadyHaveException when a verified copy already exists, Error on an
    integrity mismatch, IOError when a local file is missing. The result
    is passed through File.decompress before returning.
    """
    assert isinstance(uri, URI)

    if sha1sum:
        sha1filename = File.download(URI(uri.get_uri() + '.sha1sum'),
                                     transfer_dir)
        sha1f = file(sha1filename)
        try:
            # fix: strip the trailing newline; the raw readlines() line
            # could never compare equal to a bare hex digest (presumably
            # what pisi.util.sha1_file returns — confirm), so verification
            # always failed and the cache early-exit never fired
            newsha1 = sha1f.readlines()[0].strip()
        finally:
            sha1f.close()  # fix: the handle was previously leaked

    if uri.is_remote_file() or copylocal:
        localfile = join(transfer_dir, uri.filename())
        # TODO: code to use old .sha1sum file, is this a necessary optimization?
        #oldsha1fn = localfile + '.sha1sum'
        #if os.exists(oldsha1fn):
        #oldsha1 = file(oldsha1fn).readlines()[0]
        if sha1sum and os.path.exists(localfile):
            oldsha1 = pisi.util.sha1_file(localfile)
            if newsha1 == oldsha1:
                # early terminate, we already got it ;)
                raise AlreadyHaveException(uri, localfile)
        if uri.is_remote_file():
            ctx.ui.info(_("Fetching %s") % uri.get_uri(), verbose=True)
            fetch_url(uri, transfer_dir, ctx.ui.Progress)
        else:
            # copy to transfer dir,
            localfile = join(transfer_dir, uri.filename())
            ctx.ui.info(_("Copying %s to transfer dir") % uri.get_uri(),
                        verbose=True)
            shutil.copy(uri.get_uri(), transfer_dir)
    else:
        localfile = uri.get_uri()  # TODO: use a special function here?
        if not os.path.exists(localfile):
            raise IOError(_("File '%s' not found.") % localfile)
        if not os.access(localfile, os.W_OK):
            # make a writable copy in the transfer dir
            oldfn = localfile
            localfile = join(transfer_dir, os.path.basename(localfile))
            shutil.copy(oldfn, localfile)

    if sha1sum:
        if pisi.util.sha1_file(localfile) != newsha1:
            raise Error(_("File integrity of %s compromised.") % uri)

    localfile = File.decompress(localfile, compress)
    return localfile
def from_name(name):
    """Resolve package name through the repositories and return an Install."""
    # find package in repository
    repo = packagedb.which_repo(name)
    if not repo:
        raise Error(_("Package %s not found in any active repository.") % name)
    repo = ctx.repodb.get_repo(repo)
    pkg = packagedb.get_package(name)
    # FIXME: let pkg.packageURI be stored as URI type rather than string
    pkg_uri = URI(pkg.packageURI)
    if pkg_uri.is_absolute_path():
        pkg_path = str(pkg.packageURI)
    else:
        pkg_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                str(pkg_uri.path()))
    ctx.ui.debug(_("Package URI: %s") % pkg_path)
    return Install(pkg_path)
def rebuild_repo(repo): ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo, noln=True) if ctx.repodb.has_repo(repo): repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri()) indexname = repouri.filename() index = Index() indexpath = pisi.util.join_path(ctx.config.index_dir(), repo, indexname) tmpdir = os.path.join(ctx.config.tmp_dir(), 'index') pisi.util.clean_dir(tmpdir) pisi.util.check_dir(tmpdir) try: index.read_uri(indexpath, tmpdir, force=True) # don't look for sha1sum there except IOError, e: ctx.ui.warning( _("Input/Output error while reading %s: %s") % (indexpath, unicode(e))) return ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn)) ctx.ui.info(_('OK.'))
def __init__(self, specuri):
    """Set up a build for the given pspec URI."""
    if not isinstance(specuri, URI):
        specuri = URI(specuri)

    # read spec file, we'll need it :)
    self.set_spec_file(specuri)

    # a remote spec gets mirrored locally; a local one is built in place
    self.specdir = (self.fetch_files() if specuri.is_remote_file()
                    else dirname(self.specuri.get_uri()))

    self.sourceArchive = SourceArchive(self.spec, self.pkg_work_dir())
    self.set_environment_vars()
    self.actionLocals = self.actionGlobals = self.srcDir = None
def install_single_name(name, upgrade=False):
    """install a single package from ID"""
    # find package in repository
    repo = packagedb.which_repo(name)
    if not repo:
        raise Error(_("Package %s not found in any active repository.") % name)
    repo = ctx.repodb.get_repo(repo)
    pkg = packagedb.get_package(name)
    # FIXME: let pkg.packageURI be stored as URI type rather than string
    pkg_uri = URI(pkg.packageURI)
    if pkg_uri.is_absolute_path():
        pkg_path = str(pkg.packageURI)
    else:
        pkg_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                str(pkg_uri.path()))
    ctx.ui.debug(_("Package URI: %s") % pkg_path)
    # Package will handle remote file for us!
    install_single_file(pkg_path, upgrade)
def read(self, filename, repo=None):
    """Read PSPEC file

    For a remote filename the index is cached under the repo's index
    directory, then parsed; all Package nodes are collected into
    self.packages as PackageInfo objects.
    """
    self.filepath = filename
    url = URI(filename)
    if url.is_remote_file():
        from fetcher import fetch_url
        # fix: a remote index requires a repo name for its cache directory;
        # fail loudly here (as the sibling read_uri does) instead of letting
        # os.path.join(..., None) raise a confusing TypeError below
        assert repo
        dest = os.path.join(ctx.config.index_dir(), repo)
        if not os.path.exists(dest):
            os.makedirs(dest)
        fetch_url(url, dest, ctx.ui.Progress)
        self.filepath = os.path.join(dest, url.filename())
    self.readxml(self.filepath)
    # find all binary packages
    packageElts = self.getAllNodes("Package")
    self.packages = [metadata.PackageInfo(p) for p in packageElts]
    self.unlink()
def from_name(name):
    """Look up source name in the repositories and return a Builder for it."""
    # find package in repository
    sf, reponame = ctx.sourcedb.get_spec_repo(name)
    src = sf.source
    if not src:
        raise Error(_("Source %s not found in any active repository.") % name)
    src_uri = URI(src.sourceURI)
    if src_uri.is_absolute_path():
        src_path = str(src_uri)
    else:
        repo = ctx.repodb.get_repo(reponame)
        # FIXME: don't use dirname to work on URLs
        src_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                str(src_uri.path()))
    ctx.ui.debug(_("Source URI: %s") % src_path)
    return Builder(src_path)
def rebuild_repo(repo):
    """Reload repo's index (preferring the cached copy) and rebuild its db."""
    ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo, noln=True)
    index = Index()
    if not ctx.repodb.has_repo(repo):
        raise Error(_('No repository named %s found.') % repo)
    repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri())
    indexpath = pisi.util.join_path(ctx.config.lib_dir(), 'index', repo,
                                    repouri.filename())
    # prefer the locally cached index file when it exists
    uri_str = indexpath if os.path.exists(indexpath) else repouri.get_uri()
    try:
        index.read_uri(uri_str, repo, force=True)
    except IOError:
        ctx.ui.warning(_("Repo index file \'%s\' not found.") % uri_str)
        return
    ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn))
    ctx.ui.info(_('OK.'))
def from_name(name, ignore_dep=None):
    """Find package name in a repository and return an Install for it."""
    # find package in repository
    repo = ctx.packagedb.which_repo(name)
    if not repo:
        raise Error(_("Package %s not found in any active repository.") % name)
    ctx.ui.info(_("Package %s found in repository %s") % (name, repo))
    repo = ctx.repodb.get_repo(repo)
    pkg = ctx.packagedb.get_package(name)
    # FIXME: let pkg.packageURI be stored as URI type rather than string
    pkg_uri = URI(pkg.packageURI)
    if pkg_uri.is_absolute_path():
        pkg_path = str(pkg.packageURI)
    else:
        pkg_path = os.path.join(os.path.dirname(repo.indexuri.get_uri()),
                                str(pkg_uri.path()))
    ctx.ui.info(_("Package URI: %s") % pkg_path, verbose=True)
    return Install(pkg_path, ignore_dep)
class SourceArchive: """source archive. this is a class responsible for fetching and unpacking a source archive""" def __init__(self, spec, pkg_work_dir): self.url = URI(spec.source.archive.uri) self.pkg_work_dir = pkg_work_dir self.archiveFile = util.join_path(ctx.config.archives_dir(), self.url.filename()) self.archive = spec.source.archive def fetch(self, interactive=True): if not self.is_cached(interactive): if interactive: progress = ctx.ui.Progress else: progress = None fetch_url(self.url, ctx.config.archives_dir(), progress) def is_cached(self, interactive=True): if not access(self.archiveFile, R_OK): return False # check hash if util.check_file_hash(self.archiveFile, self.archive.sha1sum): if interactive: ctx.ui.info(_('%s [cached]') % self.archive.name) return True return False def unpack(self, clean_dir=True, target_dir=None): ctx.ui.debug("unpack: %s, %s" % (self.archiveFile, self.archive.sha1sum)) # check archive file's integrity if not util.check_file_hash(self.archiveFile, self.archive.sha1sum): raise Error, _("Unpack: archive file integrity is compromised") archive = Archive(self.archiveFile, self.archive.type) unpack_dir = self.pkg_work_dir if self.archive.norootdir == "true": os.makedirs(target_dir) unpack_dir = target_dir archive.unpack(unpack_dir, clean_dir)
def fetch_component(self):
    """Fill in source.partOf from the nearest component.xml when unset."""
    if self.spec.source.partOf:
        return
    ctx.ui.warning(_('PartOf tag not defined, looking for component'))
    diruri = parenturi(self.specuri.get_uri())
    parentdir = parenturi(diruri)
    url = util.join_path(parentdir, 'component.xml')
    if URI(url).is_remote_file():
        fetch_url(url, self.pkg_work_dir(), ctx.ui.Progress)
        path = util.join_path(self.pkg_work_dir(), 'component.xml')
    else:
        if not os.path.exists(url):
            raise Exception(
                _('Cannot find component.xml in upper directory'))
        path = url
    comp = component.Component()
    comp.read(path)
    ctx.ui.info(_('Source is part of %s component') % comp.name)
    self.spec.source.partOf = comp.name
    self.spec.override_tags()
class SourceArchive: """source archive. this is a class responsible for fetching and unpacking a source archive""" def __init__(self, bctx): self.url = URI(bctx.spec.source.archiveUri) self.archiveFile = join(ctx.config.archives_dir(), self.url.filename()) self.archiveName = bctx.spec.source.archiveName self.archiveType = bctx.spec.source.archiveType self.archiveSHA1 = bctx.spec.source.archiveSHA1 self.bctx = bctx def fetch(self, interactive=True): if not self.is_cached(interactive): if interactive: progress = ctx.ui.Progress else: progress = None fetch_url(self.url, ctx.config.archives_dir(), progress) def is_cached(self, interactive=True): if not access(self.archiveFile, R_OK): return False # check hash if util.check_file_hash(self.archiveFile, self.archiveSHA1): if interactive: ctx.ui.info('%s [cached]' % self.archiveName) return True return False def unpack(self, cleanDir=True): # check archive file's integrity if not util.check_file_hash(self.archiveFile, self.archiveSHA1): raise SourceArchiveError, "unpack: check_file_hash failed" archive = Archive(self.archiveFile, self.archiveType) archive.unpack(self.bctx.pkg_work_dir(), cleanDir)
class SourceArchive: """source archive. this is a class responsible for fetching and unpacking a source archive""" def __init__(self, spec, pkg_work_dir): self.url = URI(spec.source.archive.uri) self.pkg_work_dir = pkg_work_dir self.archiveFile = join(ctx.config.archives_dir(), self.url.filename()) self.archive = spec.source.archive def fetch(self, interactive=True): if not self.is_cached(interactive): if interactive: progress = ctx.ui.Progress else: progress = None fetch_url(self.url, ctx.config.archives_dir(), progress) def is_cached(self, interactive=True): if not access(self.archiveFile, R_OK): return False # check hash if util.check_file_hash(self.archiveFile, self.archive.sha1sum): if interactive: ctx.ui.info(_('%s [cached]') % self.archive.name) return True return False def unpack(self, clean_dir=True): # check archive file's integrity if not util.check_file_hash(self.archiveFile, self.archive.sha1sum): raise Error, _("unpack: check_file_hash failed") archive = Archive(self.archiveFile, self.archive.type) archive.unpack(self.pkg_work_dir, clean_dir)
def __init__(self, spec, pkg_work_dir):
    """Remember the spec's archive description and its cache location."""
    self.archive = spec.source.archive
    self.url = URI(self.archive.uri)
    self.pkg_work_dir = pkg_work_dir
    self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
class SourceArchive: """source archive. this is a class responsible for fetching and unpacking a source archive""" def __init__(self, spec, pkg_work_dir): self.url = URI(spec.source.archive.uri) self.pkg_work_dir = pkg_work_dir self.archiveFile = join(ctx.config.archives_dir(), self.url.filename()) self.archive = spec.source.archive def fetch(self, interactive=True): if not self.is_cached(interactive): if interactive: self.progress = ctx.ui.Progress else: self.progress = None try: if self.url.get_uri().startswith("mirrors://"): self.fetch_from_mirror() else: fetch_url(self.url, ctx.config.archives_dir(), self.progress) except pisi.fetcher.FetchError: if ctx.config.values.build.fallback: self.fetch_from_fallback() else: raise def fetch_from_fallback(self): archive = basename(self.url.get_uri()) src = join(ctx.config.values.build.fallback, archive) ctx.ui.warning(_('Trying fallback address: %s') % src) fetch_url(src, ctx.config.archives_dir(), self.progress) def fetch_from_mirror(self): uri = self.url.get_uri() sep = uri[len("mirrors://"):].split("/") name = sep.pop(0) archive = "/".join(sep) mirrors = Mirrors().get_mirrors(name) if not mirrors: raise Error(_("%s mirrors are not defined.") % name) for mirror in mirrors: try: url = join(mirror, archive) ctx.ui.warning(_('Fetching source from mirror: %s') % url) fetch_url(url, ctx.config.archives_dir(), self.progress) return except pisi.fetcher.FetchError: pass raise pisi.fetcher.FetchError(_('Could not fetch source from %s mirrors.') % name); def is_cached(self, interactive=True): if not access(self.archiveFile, R_OK): return False # check hash if util.check_file_hash(self.archiveFile, self.archive.sha1sum): if interactive: ctx.ui.info(_('%s [cached]') % self.archive.name) return True return False def unpack(self, clean_dir=True): # check archive file's integrity if not util.check_file_hash(self.archiveFile, self.archive.sha1sum): raise Error, _("unpack: check_file_hash failed") archive = Archive(self.archiveFile, 
self.archive.type) archive.unpack(self.pkg_work_dir, clean_dir)