def tools_fetch(self):
    """Fetch the archive named in the editor's <Archive> tag and stamp its sha1sum.

    Reads the spec text from the editor widget, downloads the archive into
    the archives cache if it is not already readable there, computes its
    SHA-1 digest, and rewrites the sha1sum attribute of the <Archive ...>
    tag in place (adding one if none exists).  Errors are reported through
    QMessageBox warnings.
    """
    p = re.compile("<Archive(.*)>(.*)</Archive>")
    data = unicode(self.spec_ed.text())
    m = p.search(data)
    if not m or m.groups()[1] == "":
        QMessageBox.warning(self, "Fetch error", "Archive URI is not specified")
        return
    uri = pisi.uri.URI(m.groups()[1])
    fname = os.path.join(pisi.context.config.archives_dir(), uri.filename())
    if not os.access(fname, os.R_OK):
        try:
            fetch_url(uri, pisi.context.config.archives_dir())
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; still best-effort, but now only real errors.
        except Exception:
            QMessageBox.warning(self, "Fetch error", "Cannot fetch URI")
            return
    # Fix: close the archive file even if read() raises (was leaked before).
    f = file(fname)
    try:
        digest = sha.new(f.read()).hexdigest()
    finally:
        f.close()
    p2 = re.compile("sha1sum=\"(.*)\"")
    p3 = re.compile("sha1sum='(.*)'")
    # Search only inside group 1 (the attribute text of the <Archive> tag)
    # so a sha1sum appearing elsewhere in the document is not touched.
    m2 = p2.search(data, m.start(1), m.end(1))
    m3 = p3.search(data, m.start(1), m.end(1))
    if m2:
        data = data[:m2.start(1)] + digest + data[m2.end(1):]
    elif m3:
        data = data[:m3.start(1)] + digest + data[m3.end(1):]
    else:
        # No sha1sum attribute yet: append one to the tag's attributes.
        data = data[:m.end(1)] + " sha1sum='" + digest + "'" + data[m.end(1):]
    self.spec_ed.setText(data)
def fetch(self, interactive=True):
    """Download the source archive into the archives cache unless cached.

    When *interactive* is true, the UI progress callback is attached to
    the download; otherwise the fetch runs silently.
    """
    if self.is_cached(interactive):
        return
    progress = ctx.ui.Progress if interactive else None
    fetch_url(self.url, ctx.config.archives_dir(), progress)
def download(uri, transfer_dir="/tmp"):
    """Fetch *uri* into *transfer_dir* if remote; return the local path.

    Local URIs are returned as-is without copying.
    """
    # Fix: original read `assert type(uri == URI)`, which asserts the
    # truthiness of the type object `bool` and therefore can never fail.
    # The intended check (used by the other download() variants in this
    # code base) is an isinstance test.
    assert isinstance(uri, URI)
    if uri.is_remote_file():
        ctx.ui.info(_("Fetching %s") % uri.get_uri())
        localfile = join(transfer_dir, uri.filename())
        fetch_url(uri, transfer_dir)  # FIXME: localfile would look better for fetch iface?
    else:
        localfile = uri.get_uri()  # TODO: use a special function here?
    return localfile
def download(uri, transfer_dir="/tmp", sha1sum=False, compress=None, sign=None, copylocal=False):
    """Fetch or copy *uri* into *transfer_dir* and return the local path.

    With sha1sum=True a companion '<uri>.sha1sum' file is downloaded first
    and used both to short-circuit an already-cached transfer (raising
    AlreadyHaveException) and to verify the integrity of the result.
    Remote URIs are fetched; local URIs are copied only when *copylocal*
    is set or the file is not writable in place.  The result is passed
    through File.decompress with *compress*.  *sign* is currently unused
    here.
    """
    assert isinstance(uri, URI)
    if sha1sum:
        sha1filename = File.download(URI(uri.get_uri() + '.sha1sum'), transfer_dir)
        # Fix: close the checksum file instead of leaking the handle.
        sha1f = file(sha1filename)
        try:
            newsha1 = sha1f.readlines()[0]
        finally:
            sha1f.close()
        # NOTE(review): newsha1 is the raw first line (trailing newline, if
        # any, included) and is compared verbatim against hex digests below
        # — confirm the published .sha1sum files hold the bare digest.
    if uri.is_remote_file() or copylocal:
        localfile = join(transfer_dir, uri.filename())
        # TODO: code to use old .sha1sum file, is this a necessary optimization?
        #oldsha1fn = localfile + '.sha1sum'
        #if os.exists(oldsha1fn):
        #oldsha1 = file(oldsha1fn).readlines()[0]
        if sha1sum and os.path.exists(localfile):
            oldsha1 = pisi.util.sha1_file(localfile)
            if (newsha1 == oldsha1):
                # early terminate, we already got it ;)
                raise AlreadyHaveException(uri, localfile)
        if uri.is_remote_file():
            ctx.ui.info(_("Fetching %s") % uri.get_uri(), verbose=True)
            fetch_url(uri, transfer_dir, ctx.ui.Progress)
        else:
            # copy to transfer dir,
            localfile = join(transfer_dir, uri.filename())
            ctx.ui.info(_("Copying %s to transfer dir") % uri.get_uri(), verbose=True)
            shutil.copy(uri.get_uri(), transfer_dir)
    else:
        localfile = uri.get_uri()  # TODO: use a special function here?
        if not os.path.exists(localfile):
            raise IOError(_("File '%s' not found.") % localfile)
        if not os.access(localfile, os.W_OK):
            # A writable copy is needed (e.g. for decompression below):
            # stage the file in the transfer dir.
            oldfn = localfile
            localfile = join(transfer_dir, os.path.basename(localfile))
            shutil.copy(oldfn, localfile)
    if sha1sum:
        if (pisi.util.sha1_file(localfile) != newsha1):
            raise Error(_("File integrity of %s compromised.") % uri)
    localfile = File.decompress(localfile, compress)
    return localfile
def download(uri, transfer_dir = "/tmp", sha1sum = False, compress = None, sign = None, copylocal = False):
    """Fetch or copy *uri* into *transfer_dir* and return the local path.

    With sha1sum=True a companion '<uri>.sha1sum' file is downloaded first
    and used both to short-circuit an already-cached transfer (raising
    AlreadyHaveException) and to verify the integrity of the result.
    Remote URIs are fetched; local URIs are copied only when *copylocal*
    is set or the file is not writable in place.  The result is passed
    through File.decompress with *compress*.  *sign* is unused here.
    """
    assert isinstance(uri, URI)
    if sha1sum:
        sha1filename = File.download(URI(uri.get_uri() + '.sha1sum'), transfer_dir)
        # NOTE(review): the handle is never closed, and newsha1 keeps any
        # trailing newline from the first line — it is compared verbatim
        # against hex digests below; confirm the .sha1sum file format.
        sha1f = file(sha1filename)
        newsha1 = sha1f.readlines()[0]
    if uri.is_remote_file() or copylocal:
        localfile = join(transfer_dir, uri.filename())
        # TODO: code to use old .sha1sum file, is this a necessary optimization?
        #oldsha1fn = localfile + '.sha1sum'
        #if os.exists(oldsha1fn):
        #oldsha1 = file(oldsha1fn).readlines()[0]
        if sha1sum and os.path.exists(localfile):
            oldsha1 = pisi.util.sha1_file(localfile)
            if (newsha1 == oldsha1):
                # early terminate, we already got it ;)
                raise AlreadyHaveException(uri, localfile)
        if uri.is_remote_file():
            ctx.ui.info(_("Fetching %s") % uri.get_uri())
            fetch_url(uri, transfer_dir)
        else:
            # copy to transfer dir,
            localfile = join(transfer_dir, uri.filename())
            ctx.ui.info(_("Copying %s to transfer dir") % uri.get_uri())
            shutil.copy(uri.get_uri(), transfer_dir)
    else:
        localfile = uri.get_uri() #TODO: use a special function here?
        if not os.path.exists(localfile):
            raise IOError(_("File '%s' not found.") % localfile)
        if not os.access(localfile, os.W_OK):
            # A writable copy is needed (e.g. for decompression below):
            # stage the file in the transfer dir.
            oldfn = localfile
            localfile = join(transfer_dir, os.path.basename(localfile))
            shutil.copy(oldfn, localfile)
    if sha1sum:
        if (pisi.util.sha1_file(localfile) != newsha1):
            raise Error(_("File integrity of %s compromised.") % uri)
    localfile = File.decompress(localfile, compress)
    return localfile
def fetch(self, interactive=True):
    """Ensure the source archive is present in the archives cache.

    URLs with the mirrors:// scheme are resolved through the mirror
    list; on a fetch failure, the configured fallback address is tried
    (when build.fallback is set), otherwise the error propagates.
    """
    if self.is_cached(interactive):
        return
    self.progress = ctx.ui.Progress if interactive else None
    try:
        if self.url.get_uri().startswith("mirrors://"):
            self.fetch_from_mirror()
        else:
            fetch_url(self.url, ctx.config.archives_dir(), self.progress)
    except pisi.fetcher.FetchError:
        if not ctx.config.values.build.fallback:
            raise
        self.fetch_from_fallback()
def fetch(self, interactive=True):
    """Download the source archive into the cache, retrying the fallback URL on failure."""
    if self.is_cached(interactive):
        return
    progress = ctx.ui.Progress if interactive else None
    try:
        fetch_url(self.url, ctx.config.archives_dir(), progress)
    except pisi.fetcher.FetchError:
        # if archive can not be reached from the url, try the fallback
        # address.
        if not ctx.config.values.build.fallback:
            raise
        archive = basename(self.url.get_uri())
        src = join(ctx.config.values.build.fallback, archive)
        fetch_url(src, ctx.config.archives_dir(), progress)
def fetch_component(self):
    """Fill in spec.source.partOf from the nearest component.xml when unset."""
    if self.spec.source.partOf:
        return
    ctx.ui.warning(_('PartOf tag not defined, looking for component'))
    # component.xml is looked up two directory levels above the pspec.
    upper_dir = parenturi(parenturi(self.specuri.get_uri()))
    url = util.join_path(upper_dir, 'component.xml')
    if URI(url).is_remote_file():
        fetch_url(url, self.pkg_work_dir(), ctx.ui.Progress)
        path = util.join_path(self.pkg_work_dir(), 'component.xml')
    elif os.path.exists(url):
        path = url
    else:
        raise Exception(_('Cannot find component.xml in upper directory'))
    comp = component.Component()
    comp.read(path)
    ctx.ui.info(_('Source is part of %s component') % comp.name)
    self.spec.source.partOf = comp.name
def fetch_component(self):
    """Derive the source's component from a parent component.xml when PartOf is missing."""
    if not self.spec.source.partOf:
        ctx.ui.warning(_('PartOf tag not defined, looking for component'))
        # component.xml is looked up two directory levels above the pspec.
        grandparent = parenturi(parenturi(self.specuri.get_uri()))
        url = util.join_path(grandparent, 'component.xml')
        if URI(url).is_remote_file():
            # Pull the remote component.xml into the package work dir.
            fetch_url(url, self.pkg_work_dir(), ctx.ui.Progress)
            path = util.join_path(self.pkg_work_dir(), 'component.xml')
        else:
            if not os.path.exists(url):
                raise Exception(
                    _('Cannot find component.xml in upper directory'))
            path = url
        comp = component.Component()
        comp.read(path)
        ctx.ui.info(_('Source is part of %s component') % comp.name)
        self.spec.source.partOf = comp.name
    # NOTE(review): placed unconditionally here — the flattened source does
    # not preserve indentation, so confirm against upstream whether this
    # belongs inside the `if not partOf` branch.
    self.spec.override_tags()
def fetch_from_mirror():
    """Resolve a mirrors:// URI and fetch the archive from the first working mirror.

    Raises Error when no mirrors are defined for the named mirror group,
    and pisi.fetcher.FetchError when every mirror fails.
    """
    # NOTE(review): `URI` here appears to be the module-level pisi.uri.URI
    # *instance* rebound by the main loop, not the class — confirm.
    uri = URI.get_uri()
    parts = uri[len("mirrors://"):].split("/")
    name = parts.pop(0)
    archive = "/".join(parts)
    mirrors = Mirrors().get_mirrors(name)
    if not mirrors:
        raise Error(_("%s mirrors are not defined.") % name)
    for mirror in mirrors:
        try:
            url = os.path.join(mirror, archive)
            ctx.ui.warning(_('Fetching source from mirror: %s') % url)
            fetch_url(url, ctx.config.archives_dir())
            return
        except pisi.fetcher.FetchError:
            # This mirror failed; fall through to the next one.
            pass
    raise pisi.fetcher.FetchError(_('Could not fetch source from %s mirrors.') % name)
def fetch_from_mirror():
    """Resolve a mirrors:// URI and fetch the archive from the first working mirror.

    Raises Error when no mirrors are defined for the named mirror group,
    and pisi.fetcher.FetchError when every mirror fails.
    """
    # NOTE(review): `URI` here appears to be the module-level pisi.uri.URI
    # *instance* rebound by the main loop, not the class — confirm.
    uri = URI.get_uri()
    # mirrors://<name>/<path/to/archive>
    sep = uri[len("mirrors://"):].split("/")
    name = sep.pop(0)
    archive = "/".join(sep)
    mirrors = Mirrors().get_mirrors(name)
    if not mirrors:
        raise Error(_("%s mirrors are not defined.") % name)
    for mirror in mirrors:
        try:
            url = os.path.join(mirror, archive)
            ctx.ui.warning(_('Fetching source from mirror: %s') % url)
            fetch_url(url, ctx.config.archives_dir())
            return
        except pisi.fetcher.FetchError:
            # This mirror failed; try the next one.
            pass
    raise pisi.fetcher.FetchError(
        _('Could not fetch source from %s mirrors.') % name)
def __init__(self, uri, mode, transfer_dir="/tmp"):
    """Open *uri* with the given File.read/File.write mode.

    Remote URIs are fetched into *transfer_dir* first (read mode only);
    remote writing is not implemented.  The underlying file object is
    stored on self.__file__.
    """
    uri = File.make_uri(uri)
    if mode not in (File.read, File.write):
        raise Error(_("File mode must be either File.read or File.write"))
    self.mode = mode
    if uri.is_remote_file():
        if self.mode != File.read:
            raise Error(_("Remote write not implemented"))
        ctx.ui.info(_("Fetching %s") % uri.get_uri())
        localfile = join(transfer_dir, uri.filename())
        fetch_url(uri, transfer_dir)  # FIXME: localfile would look better for fetch iface?
    else:
        localfile = uri.get_uri()  # TODO: use a special function here?
    access = 'r' if self.mode == File.read else 'w'
    self.__file__ = file(localfile, access)
raise pisi.fetcher.FetchError(_('Could not fetch source from %s mirrors.') % name); if __name__ == "__main__": pisi.api.init(database=False, options='') try: packages = scanPSPEC(sys.argv[1]) except: print "Usage: fetchAll.py path2repo" sys.exit(1) for package in packages: spec = pisi.specfile.SpecFile() spec.read(os.path.join(package, "pspec.xml")) URI = pisi.uri.URI(spec.source.archive.uri) if not isCached(URI.filename(), spec.source.archive.sha1sum): print URI, " -> " , os.path.join(ctx.config.archives_dir(), URI.filename()) try: if URI.get_uri().startswith("mirrors://"): fetch_from_mirror() else: fetch_url(URI, ctx.config.archives_dir()) except pisi.fetcher.FetchError, e: print e pass else: print URI, "already downloaded..." pisi.api.finalize()
os.path.join(ctx.config.archives_dir(), file), sha1sum) except: pass if __name__ == "__main__": pisi.api.init(database=False, options='') try: packages = scanPSPEC(sys.argv[1]) except: print "Usage: fetchAll.py path2repo" sys.exit(1) for package in packages: spec = pisi.specfile.SpecFile() spec.read(os.path.join(package, "pspec.xml")) URI = pisi.uri.URI(spec.source.archive.uri) if not isCached(URI.filename(), spec.source.archive.sha1sum): print URI, " -> ", os.path.join(ctx.config.archives_dir(), URI.filename()) try: fetch_url(URI, ctx.config.archives_dir()) except pisi.fetcher.FetchError, e: print e pass else: print URI, "already downloaded..." pisi.api.finalize()
def fetch_from_fallback(self):
    """Retry the archive download from the configured build fallback address."""
    archive = basename(self.url.get_uri())
    fallback_src = join(ctx.config.values.build.fallback, archive)
    # Let the user know the primary URL failed and a fallback is in use.
    ctx.ui.warning(_('Trying fallback address: %s') % fallback_src)
    fetch_url(fallback_src, ctx.config.archives_dir(), self.progress)
def main(): global options # Parse options parser = OptionParser(usage="%prog [options]", version="%prog 1.0") parser.add_option("-N", "--no-color", action="store_false", dest="color", default=True, help=_("don't use colors")) parser.add_option("-p", "--packages", action="store_true", dest="packages", default=False, help=_("show package names")) parser.add_option("-l", "--long", action="store_true", dest="long", default=False, help=_("show details of advisories")) parser.add_option("-a", "--all", action="store_false", dest="affected", default=True, help=_("show all advisories")) parser.add_option("-F", "--no-fetch", action="store_false", dest="fetch", default=True, help=_("don't download PLSA index")) (options, args) = parser.parse_args() # Get locale lang = os.environ["LC_ALL"].split("_")[0] # Show package details in --long if options.long: options.packages = True # Create work directory if not os.access("/tmp/plsa", os.F_OK): os.mkdir("/tmp/plsa") # Init PISI API pisi.api.init(database=True, comar=False, write=False) # Get installed packages installed_packages = {} for package in ctx.installdb.list_installed(): # Release comparison seems enough installed_packages[package] = int(ctx.installdb.get_version(package)[1]) # List of orphaned packages orphaned = [] # Get list of reporsitories plsas = {} for repo in ctx.repodb.list(): uri = ctx.repodb.get_repo(repo).indexuri.get_uri() plsafile = "%s/plsa-index.xml.bz2" % uri[0:uri.rfind("/")] tmpfile = "/tmp/plsa/%s.xml" % repo if options.fetch: print _("Downloading PLSA database of %s") % repo try: fetch_url(plsafile, "/tmp/plsa", progress=ctx.ui.Progress) except FetchError, e: print _("Unable to download %s: %s") % (plsafile, e) continue print _("Checking file integrity of %s") % repo try: fetch_url("%s.sha1sum" % plsafile, "/tmp/plsa") except FetchError, e: print _("Unable to download checksum of %s") % repo continue orig_sha1sum = file("%s.sha1sum" % plsafile).readlines()[0].split()[0] if sha1_file(plsafile) != 
orig_sha1sum: print _("File integrity of %s compromised.") % plsafile continue print _("Unpacking PLSA database of %s") % repo try: File.decompress("/tmp/plsa/plsa-index.xml.bz2", File.bz2) except: print _("Unable to decompress %s") % plsafile continue os.rename("/tmp/plsa/plsa-index.xml", tmpfile) os.unlink("/tmp/plsa/plsa-index.xml.bz2") plsas[repo] = tmpfile