def set_spec_file(self, specuri):
    """Resolve *specuri*, parse it as a SpecFile, and store both the
    URI and the parsed spec on this object."""
    # Canonicalize local paths so later path handling sees a real
    # filesystem location.
    if not specuri.is_remote_file():
        specuri = URI(os.path.realpath(specuri.get_uri()))  # FIXME: doesn't work for file://
    self.specuri = specuri
    parsed_spec = SpecFile()
    parsed_spec.read(specuri, ctx.config.tmp_dir())
    self.spec = parsed_spec
def set_spec_file(self, specuri):
    """Record the spec URI on this object and load it into a SpecFile.

    Local (non-remote) URIs are canonicalized with realpath first.
    """
    if not specuri.is_remote_file():
        # FIXME: doesn't work for file://
        real_path = os.path.realpath(specuri.get_uri())
        specuri = URI(real_path)
    self.specuri = specuri
    loaded = SpecFile()
    loaded.read(specuri, ctx.config.tmp_dir())
    self.spec = loaded
class SourceArchive:
    """Fetch and unpack the source archive described by a build spec.

    The archive is downloaded into the system-wide archive cache and
    verified against the sha1sum recorded in the spec before unpacking.
    """

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        # Cached local copy of the archive lives in the shared archives dir.
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a valid cached copy already exists.

        If the primary URL fails and a fallback address is configured,
        the same file name is retried from the fallback location;
        otherwise the FetchError propagates.
        """
        if not self.is_cached(interactive):
            if interactive:
                progress = ctx.ui.Progress
            else:
                progress = None
            try:
                fetch_url(self.url, ctx.config.archives_dir(), progress)
            except pisi.fetcher.FetchError:
                # If the archive cannot be reached at its URL, try the
                # configured fallback address before giving up.
                if ctx.config.values.build.fallback:
                    archive = basename(self.url.get_uri())
                    src = join(ctx.config.values.build.fallback, archive)
                    fetch_url(src, ctx.config.archives_dir(), progress)
                else:
                    raise

    def is_cached(self, interactive=True):
        """Return True if a readable archive with the expected sha1sum
        is already in the cache."""
        if not access(self.archiveFile, R_OK):
            return False
        # An existing file only counts as cached if its hash matches.
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True):
        """Verify the archive's integrity and extract it into the
        package work directory.

        Raises Error if the sha1sum check fails.
        """
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # Call-form raise works on both Python 2 and 3; the original
            # `raise Error, msg` statement form is Python-2-only syntax.
            raise Error(_("unpack: check_file_hash failed"))
        archive = Archive(self.archiveFile, self.archive.type)
        archive.unpack(self.pkg_work_dir, clean_dir)
def rebuild_repo(repo):
    """Re-read the index of the repository named *repo* and rebuild its
    database entries inside a single transaction.

    Raises Error when no repository with that name is registered.
    """
    ctx.ui.info(_('* Rebuilding \'%s\' named repo... ') % repo, noln=True)

    # Guard clause: bail out early for unknown repositories.
    if not ctx.repodb.has_repo(repo):
        raise Error(_('No repository named %s found.') % repo)

    index = Index()
    repouri = URI(ctx.repodb.get_repo(repo).indexuri.get_uri())
    indexname = repouri.filename()
    indexpath = pisi.util.join_path(ctx.config.lib_dir(), 'index', repo, indexname)

    # Prefer the locally stored copy of the index when it exists.
    uri_str = indexpath if os.path.exists(indexpath) else repouri.get_uri()

    try:
        index.read_uri(uri_str, repo, force = True)
    except IOError:
        ctx.ui.warning(_("Repo index file \'%s\' not found.") % uri_str)
        return

    ctx.txn_proc(lambda txn: index.update_db(repo, txn=txn))
    ctx.ui.info(_('OK.'))
class SourceArchive:
    """Fetch and unpack the source archive described by a build spec.

    Supports plain URLs, mirrors:// pseudo-URLs (each configured mirror
    is tried in order) and a configured fallback address.
    """

    def __init__(self, spec, pkg_work_dir):
        self.url = URI(spec.source.archive.uri)
        self.pkg_work_dir = pkg_work_dir
        # Cached local copy of the archive lives in the shared archives dir.
        self.archiveFile = join(ctx.config.archives_dir(), self.url.filename())
        self.archive = spec.source.archive

    def fetch(self, interactive=True):
        """Download the archive unless a valid cached copy already exists.

        mirrors:// URIs are dispatched to fetch_from_mirror(); when the
        primary fetch fails and a fallback address is configured, the
        fallback is tried before the FetchError propagates.
        """
        if not self.is_cached(interactive):
            if interactive:
                self.progress = ctx.ui.Progress
            else:
                self.progress = None
            try:
                if self.url.get_uri().startswith("mirrors://"):
                    self.fetch_from_mirror()
                else:
                    fetch_url(self.url, ctx.config.archives_dir(), self.progress)
            except pisi.fetcher.FetchError:
                if ctx.config.values.build.fallback:
                    self.fetch_from_fallback()
                else:
                    raise

    def fetch_from_fallback(self):
        """Retry the download from the configured fallback address,
        using the archive's base file name."""
        archive = basename(self.url.get_uri())
        src = join(ctx.config.values.build.fallback, archive)
        ctx.ui.warning(_('Trying fallback address: %s') % src)
        fetch_url(src, ctx.config.archives_dir(), self.progress)

    def fetch_from_mirror(self):
        """Resolve a mirrors://<name>/<path> URI and try each configured
        mirror for <name> in turn until one succeeds.

        Raises Error when no mirrors are defined for <name>, and
        FetchError when every mirror fails.
        """
        uri = self.url.get_uri()
        # Split "mirrors://<name>/<archive-path>" into its parts.
        sep = uri[len("mirrors://"):].split("/")
        name = sep.pop(0)
        archive = "/".join(sep)

        mirrors = Mirrors().get_mirrors(name)
        if not mirrors:
            raise Error(_("%s mirrors are not defined.") % name)

        for mirror in mirrors:
            try:
                url = join(mirror, archive)
                ctx.ui.warning(_('Fetching source from mirror: %s') % url)
                fetch_url(url, ctx.config.archives_dir(), self.progress)
                return
            except pisi.fetcher.FetchError:
                # Best effort: a failing mirror just means we try the next one.
                pass

        # Stray trailing semicolon removed from the original statement.
        raise pisi.fetcher.FetchError(
            _('Could not fetch source from %s mirrors.') % name)

    def is_cached(self, interactive=True):
        """Return True if a readable archive with the expected sha1sum
        is already in the cache."""
        if not access(self.archiveFile, R_OK):
            return False
        # An existing file only counts as cached if its hash matches.
        if util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            if interactive:
                ctx.ui.info(_('%s [cached]') % self.archive.name)
            return True
        return False

    def unpack(self, clean_dir=True):
        """Verify the archive's integrity and extract it into the
        package work directory.

        Raises Error if the sha1sum check fails.
        """
        # check archive file's integrity
        if not util.check_file_hash(self.archiveFile, self.archive.sha1sum):
            # Call-form raise works on both Python 2 and 3; the original
            # `raise Error, msg` statement form is Python-2-only syntax.
            raise Error(_("unpack: check_file_hash failed"))
        archive = Archive(self.archiveFile, self.archive.type)
        archive.unpack(self.pkg_work_dir, clean_dir)