def __init__(self, packagefn, mode='r'):
    """Open the package at *packagefn*; download it into the packages
    directory first when the path is a remote URL."""
    self.filepath = packagefn
    url = URI(packagefn)

    if url.is_remote_file():
        from fetcher import fetch_url
        dest = ctx.config.packages_dir()
        self.filepath = join(dest, url.filename())
        # FIXME: exists is not enough, also sha1sum check needed
        # when implemented in pisi-index.xml
        if exists(self.filepath):
            ctx.ui.info(_('%s [cached]') % url.filename())
        else:
            fetch_url(url, dest, ctx.ui.Progress)

    self.impl = archive.ArchiveZip(self.filepath, 'zip', mode)
def read_uri(self, filename, repo = None):
    """Parse the PSPEC at *filename*, first fetching it into the
    repo's index directory when it lives on a remote host."""
    self.filepath = filename
    url = URI(filename)

    if url.is_remote_file():
        from fetcher import fetch_url
        assert repo
        target_dir = os.path.join(ctx.config.index_dir(), repo)
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)
        fetch_url(url, target_dir, ctx.ui.Progress)
        self.filepath = os.path.join(target_dir, url.filename())

    self.read(self.filepath)
class SourceArchive:
    """Source archive: fetches and unpacks a source archive, retrying
    from the configured fallback address when the primary URL fails."""

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a hash-verified copy is cached."""
        if self.is_cached(interactive):
            return
        progress = ctx.ui.Progress if interactive else None
        try:
            fetch_url(self.url, ctx.config.archives_dir(), progress)
        except pisi.fetcher.FetchError:
            # if archive can not be reached from the url, try the fallback
            # address.
            if not ctx.config.values.build.fallback:
                raise
            archive = basename(self.url.get_uri())
            src = join(ctx.config.values.build.fallback, archive)
            fetch_url(src, ctx.config.archives_dir(), progress)

    def is_cached(self, interactive=True):
        """Return True when the archive exists locally and its sha1 matches."""
        if not access(self.archiveFile, R_OK):
            return False
        # check hash
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True):
        """Verify the archive's hash, then extract it into pkg_work_dir."""
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # fixed: Py2-only "raise Error, msg" statement form -> call form
            raise Error(_("unpack: check_file_hash failed"))
        archive = Archive(self.archiveFile, self.archive.type)
        archive.unpack(self.pkg_work_dir, clean_dir)
def rebuild_repo(repo):
    """Re-read the locally cached index of *repo* and rebuild its
    database entries inside a single transaction."""
    ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo)

    if ctx.repodb.has_repo(repo):
        repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri())
        indexname = repouri.filename()
        index = Index()
        indexpath = pisi.util.join_path(ctx.config.index_dir(), repo, indexname)
        tmpdir = os.path.join(ctx.config.tmp_dir(), 'index')
        pisi.util.clean_dir(tmpdir)
        pisi.util.check_dir(tmpdir)
        try:
            index.read_uri(indexpath, tmpdir, force=True)  # don't look for sha1sum there
        except IOError as e:  # fixed: Py2-only "except IOError, e" syntax
            ctx.ui.warning(_("Input/Output error while reading %s: %s")
                           % (indexpath, unicode(e)))
            return
        ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn))
class SourceArchive:
    """Source archive: fetches and unpacks a source archive, honouring
    the spec's norootdir flag by unpacking into a caller-given target."""

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        self.archiveFile = util.join_path(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a hash-verified copy is cached."""
        if not self.is_cached(interactive):
            progress = ctx.ui.Progress if interactive else None
            fetch_url(self.url, ctx.config.archives_dir(), progress)

    def is_cached(self, interactive=True):
        """Return True when the archive exists locally and its sha1 matches."""
        if not access(self.archiveFile, R_OK):
            return False
        # check hash
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True, target_dir=None):
        """Verify the archive hash and extract into pkg_work_dir, or into
        *target_dir* when the spec sets norootdir="true"."""
        ctx.ui.debug("unpack: %s, %s" % (self.archiveFile, self.archive.sha1sum))
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # fixed: Py2-only "raise Error, msg" statement form -> call form
            raise Error(_("Unpack: archive file integrity is compromised"))
        archive = Archive(self.archiveFile, self.archive.type)
        unpack_dir = self.pkg_work_dir
        if self.archive.norootdir == "true":
            # fixed: os.makedirs raises OSError when the directory already
            # exists (e.g. on a rebuild), so create it only if missing
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            unpack_dir = target_dir
        archive.unpack(unpack_dir, clean_dir)
def read(self, filename, repo = None):
    """Read PSPEC file, fetching it first when remote, then collect the
    binary package metadata it declares into self.packages."""
    self.filepath = filename
    url = URI(filename)

    if url.is_remote_file():
        from fetcher import fetch_url
        # fixed: a remote index must be cached under a named repo directory;
        # fail early and clearly instead of os.path.join(..., None) raising
        # a TypeError below (matches the sibling read_uri's check)
        assert repo
        dest = os.path.join(ctx.config.index_dir(), repo)
        if not os.path.exists(dest):
            os.makedirs(dest)
        fetch_url(url, dest, ctx.ui.Progress)
        self.filepath = os.path.join(dest, url.filename())

    self.readxml(self.filepath)

    # find all binary packages
    packageElts = self.getAllNodes("Package")
    self.packages = [metadata.PackageInfo(p) for p in packageElts]

    self.unlink()
def rebuild_repo(repo):
    """Re-read the locally cached index of *repo* and rebuild its
    database entries inside a single transaction."""
    ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo, noln=True)

    if ctx.repodb.has_repo(repo):
        repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri())
        indexname = repouri.filename()
        index = Index()
        indexpath = pisi.util.join_path(ctx.config.index_dir(), repo, indexname)
        tmpdir = os.path.join(ctx.config.tmp_dir(), 'index')
        pisi.util.clean_dir(tmpdir)
        pisi.util.check_dir(tmpdir)
        try:
            index.read_uri(indexpath, tmpdir, force=True)  # don't look for sha1sum there
        except IOError as e:  # fixed: Py2-only "except IOError, e" syntax
            ctx.ui.warning(
                _("Input/Output error while reading %s: %s") % (indexpath, unicode(e)))
            return
        ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn))
        ctx.ui.info(_('OK.'))
class SourceArchive:
    """Source archive: fetches and unpacks the archive referenced by a
    build context (bctx)."""

    def __init__(self, bctx):
        self.url = URI(bctx.spec.source.archiveUri)
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archiveName = bctx.spec.source.archiveName
        self.archiveType = bctx.spec.source.archiveType
        self.archiveSHA1 = bctx.spec.source.archiveSHA1
        self.bctx = bctx

    def fetch(self, interactive=True):
        """Download the archive unless a hash-verified copy is cached."""
        if not self.is_cached(interactive):
            progress = ctx.ui.Progress if interactive else None
            fetch_url(self.url, ctx.config.archives_dir(), progress)

    def is_cached(self, interactive=True):
        """Return True when the archive exists locally and its sha1 matches."""
        if not access(self.archiveFile, R_OK):
            return False
        # check hash
        if util.check_file_hash(self.archiveFile, self.archiveSHA1):
            if interactive:
                ctx.ui.info('%s [cached]' % self.archiveName)
            return True
        return False

    def unpack(self, cleanDir=True):
        """Verify the archive's hash, then extract into the build work dir."""
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archiveSHA1):
            # fixed: Py2-only "raise Exc, msg" statement form -> call form
            raise SourceArchiveError("unpack: check_file_hash failed")
        archive = Archive(self.archiveFile, self.archiveType)
        archive.unpack(self.bctx.pkg_work_dir(), cleanDir)
class SourceArchive:
    """Source archive: fetches and unpacks a source archive described by
    a package spec."""

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a hash-verified copy is cached."""
        if not self.is_cached(interactive):
            progress = ctx.ui.Progress if interactive else None
            fetch_url(self.url, ctx.config.archives_dir(), progress)

    def is_cached(self, interactive=True):
        """Return True when the archive exists locally and its sha1 matches."""
        if not access(self.archiveFile, R_OK):
            return False
        # check hash
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True):
        """Verify the archive's hash, then extract it into pkg_work_dir."""
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # fixed: Py2-only "raise Error, msg" statement form -> call form
            raise Error(_("unpack: check_file_hash failed"))
        archive = Archive(self.archiveFile, self.archive.type)
        archive.unpack(self.pkg_work_dir, clean_dir)
def rebuild_repo(repo):
    """Rebuild the database of *repo* from its index, preferring the
    locally cached index file when one exists."""
    ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo, noln=True)

    index = Index()
    if not ctx.repodb.has_repo(repo):
        raise Error(_('No repository named %s found.') % repo)

    repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri())
    indexpath = pisi.util.join_path(ctx.config.lib_dir(), 'index',
                                    repo, repouri.filename())
    # fall back to the remote URI only when no cached copy is on disk
    uri_str = indexpath if os.path.exists(indexpath) else repouri.get_uri()

    try:
        index.read_uri(uri_str, repo, force=True)
    except IOError:
        ctx.ui.warning(_("Repo index file \'%s\' not found.") % uri_str)
        return

    ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn))
    ctx.ui.info(_('OK.'))
class SourceArchive:
    """Source archive: fetches a source archive directly, via a
    mirrors:// URI, or from the fallback address, then unpacks it."""

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a hash-verified copy is cached.

        mirrors:// URIs are resolved through fetch_from_mirror; on a
        fetch failure the fallback address is tried when configured."""
        if self.is_cached(interactive):
            return
        self.progress = ctx.ui.Progress if interactive else None
        try:
            if self.url.get_uri().startswith("mirrors://"):
                self.fetch_from_mirror()
            else:
                fetch_url(self.url, ctx.config.archives_dir(), self.progress)
        except pisi.fetcher.FetchError:
            if ctx.config.values.build.fallback:
                self.fetch_from_fallback()
            else:
                raise

    def fetch_from_fallback(self):
        """Fetch the archive from the configured build.fallback address."""
        archive = basename(self.url.get_uri())
        src = join(ctx.config.values.build.fallback, archive)
        ctx.ui.warning(_('Trying fallback address: %s') % src)
        fetch_url(src, ctx.config.archives_dir(), self.progress)

    def fetch_from_mirror(self):
        """Resolve a mirrors://<name>/<path> URI and try each defined
        mirror in turn until one succeeds."""
        uri = self.url.get_uri()
        sep = uri[len("mirrors://"):].split("/")
        name = sep.pop(0)
        archive = "/".join(sep)

        mirrors = Mirrors().get_mirrors(name)
        if not mirrors:
            raise Error(_("%s mirrors are not defined.") % name)

        for mirror in mirrors:
            try:
                url = join(mirror, archive)
                ctx.ui.warning(_('Fetching source from mirror: %s') % url)
                fetch_url(url, ctx.config.archives_dir(), self.progress)
                return
            except pisi.fetcher.FetchError:
                pass  # try the next mirror

        # fixed: dropped stray trailing semicolon from the original
        raise pisi.fetcher.FetchError(
            _('Could not fetch source from %s mirrors.') % name)

    def is_cached(self, interactive=True):
        """Return True when the archive exists locally and its sha1 matches."""
        if not access(self.archiveFile, R_OK):
            return False
        # check hash
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True):
        """Verify the archive's hash, then extract it into pkg_work_dir."""
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # fixed: Py2-only "raise Error, msg" statement form -> call form
            raise Error(_("unpack: check_file_hash failed"))
        archive = Archive(self.archiveFile, self.archive.type)
        archive.unpack(self.pkg_work_dir, clean_dir)