def __download_cpio_archive(self, apiurl, project, repo, arch, package, **pkgs): if not pkgs: return query = ['binary=%s' % quote_plus(i) for i in pkgs] query.append('view=cpio') tmparchive = tmpfile = None try: (fd, tmparchive) = tempfile.mkstemp(prefix='osc_build_cpio') (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build') url = makeurl(apiurl, ['build', project, repo, arch, package], query=query) sys.stdout.write("preparing download ...\r") sys.stdout.flush() self.gr.urlgrab(url, filename=tmparchive, text='fetching packages for \'%s\'' % project) archive = cpio.CpioRead(tmparchive) archive.read() for hdr in archive: # XXX: we won't have an .errors file because we're using # getbinarylist instead of the public/... route (which is # routed to getbinaries (but that won't work for kiwi products)) if hdr.filename == '.errors': archive.copyin_file(hdr.filename) raise oscerr.APIError( 'CPIO archive is incomplete (see .errors file)') if package == '_repository': n = re.sub(r'\.pkg\.tar\..z$', '.arch', hdr.filename) pac = pkgs[n.rsplit('.', 1)[0]] else: # this is a kiwi product pac = pkgs[hdr.filename] archive.copyin_file(hdr.filename, os.path.dirname(tmpfile), os.path.basename(tmpfile)) self.move_package(tmpfile, pac.localdir, pac) # check if we got all packages... (because we've no .errors file) for pac in pkgs.itervalues(): if not os.path.isfile(pac.fullfilename): raise oscerr.APIError('failed to fetch file \'%s\': ' \ 'does not exist in CPIO archive' % pac.repofilename) except URLGrabError, e: if e.errno != 14 or e.code != 414: raise # query str was too large keys = pkgs.keys() if len(keys) == 1: raise oscerr.APIError( 'unable to fetch cpio archive: server always returns code 414' ) n = len(pkgs) / 2 new_pkgs = dict([(k, pkgs[k]) for k in keys[:n]]) self.__download_cpio_archive(apiurl, project, repo, arch, package, **new_pkgs) new_pkgs = dict([(k, pkgs[k]) for k in keys[n:]]) self.__download_cpio_archive(apiurl, project, repo, arch, package, **new_pkgs)
def __download_cpio_archive(self, apiurl, project, repo, arch, package, **pkgs): if not pkgs: return query = ['binary=%s' % quote_plus(i) for i in pkgs] query.append('view=cpio') tmparchive = tmpfile = None try: (fd, tmparchive) = tempfile.mkstemp(prefix='osc_build_cpio') (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build') url = makeurl(apiurl, ['build', project, repo, arch, package], query=query) sys.stdout.write("preparing download ...\r") sys.stdout.flush() self.gr.urlgrab(url, filename = tmparchive, text = 'fetching packages for \'%s\'' % project) archive = cpio.CpioRead(tmparchive) archive.read() for hdr in archive: # XXX: we won't have an .errors file because we're using # getbinarylist instead of the public/... route (which is # routed to getbinaries (but that won't work for kiwi products)) if hdr.filename == '.errors': archive.copyin_file(hdr.filename) raise oscerr.APIError('CPIO archive is incomplete (see .errors file)') if package == '_repository': n = re.sub(r'\.pkg\.tar\..z$', '.arch', hdr.filename) pac = pkgs[n.rsplit('.', 1)[0]] else: # this is a kiwi product pac = pkgs[hdr.filename] archive.copyin_file(hdr.filename, os.path.dirname(tmpfile), os.path.basename(tmpfile)) self.move_package(tmpfile, pac.localdir, pac) # check if we got all packages... (because we've no .errors file) for pac in pkgs.itervalues(): if not os.path.isfile(pac.fullfilename): raise oscerr.APIError('failed to fetch file \'%s\': ' \ 'does not exist in CPIO archive' % pac.repofilename) except URLGrabError, e: if e.errno != 14 or e.code != 414: raise # query str was too large keys = pkgs.keys() if len(keys) == 1: raise oscerr.APIError('unable to fetch cpio archive: server always returns code 414') n = len(pkgs) / 2 new_pkgs = dict([(k, pkgs[k]) for k in keys[:n]]) self.__download_cpio_archive(apiurl, project, repo, arch, package, **new_pkgs) new_pkgs = dict([(k, pkgs[k]) for k in keys[n:]]) self.__download_cpio_archive(apiurl, project, repo, arch, package, **new_pkgs)
def __fetch_cpio(self, apiurl):
    """Download every queued cpio batch in self.cpio.

    self.cpio maps 'project/repo/arch/package' -> {binary name: package
    object}. Each batch is fetched as one cpio archive, unpacked, and
    moved into the cache; temp files are always removed.

    Raises oscerr.APIError on an incomplete archive or a missing binary.
    """
    from urllib import quote_plus
    for prpap, pkgs in self.cpio.iteritems():
        project, repo, arch, package = prpap.split('/', 3)
        query = ['binary=%s' % quote_plus(i) for i in pkgs.keys()]
        query.append('view=cpio')
        tmparchive = tmpfile = None
        try:
            (fd, tmparchive) = tempfile.mkstemp(prefix='osc_build_cpio')
            (fd, tmpfile) = tempfile.mkstemp(prefix='osc_build')
            url = makeurl(apiurl, ['build', project, repo, arch, package], query=query)
            sys.stdout.write("preparing download ...\r")
            sys.stdout.flush()
            self.gr.urlgrab(url, filename=tmparchive,
                            text='fetching packages for \'%s\'' % project)
            archive = cpio.CpioRead(tmparchive)
            archive.read()
            for hdr in archive:
                # XXX: we won't have an .errors file because we're using
                # getbinarylist instead of the public/... route (which is
                # routed to getbinaries (but that won't work for kiwi products))
                if hdr.filename == '.errors':
                    archive.copyin_file(hdr.filename)
                    raise oscerr.APIError('CPIO archive is incomplete (see .errors file)')
                if package == '_repository':
                    # fix: normalize Arch Linux .pkg.tar.?z names before the
                    # rsplit, consistent with __download_cpio_archive —
                    # otherwise such binaries raise KeyError here; a no-op
                    # for all other filenames
                    n = re.sub(r'\.pkg\.tar\..z$', '.arch', hdr.filename)
                    pac = pkgs[n.rsplit('.', 1)[0]]
                else:
                    # this is a kiwi product
                    pac = pkgs[hdr.filename]
                archive.copyin_file(hdr.filename, os.path.dirname(tmpfile),
                                    os.path.basename(tmpfile))
                self.move_package(tmpfile, pac.localdir, pac)
            # check if we got all packages... (because we've no .errors file)
            for pac in pkgs.itervalues():
                if not os.path.isfile(pac.fullfilename):
                    raise oscerr.APIError('failed to fetch file \'%s\': '
                                          'does not exist in CPIO archive' % pac.repofilename)
        finally:
            if tmparchive is not None and os.path.exists(tmparchive):
                os.unlink(tmparchive)
            if tmpfile is not None and os.path.exists(tmpfile):
                os.unlink(tmpfile)
def __fetch_cpio(self, apiurl):
    """Download all pending cpio batches recorded in self.cpio.

    Each key of self.cpio is 'project/repo/arch/package'; the value maps
    binary filename to its package object. The batch is streamed as one
    cpio archive, each member is unpacked to a temp file and moved into
    the package's local cache dir. Temp files are removed in all cases.
    """
    from urllib import quote_plus
    for prpap, pkgs in self.cpio.iteritems():
        project, repo, arch, package = prpap.split("/", 3)
        query = ["binary=%s" % quote_plus(i) for i in pkgs.keys()]
        query.append("view=cpio")
        tmparchive = tmpfile = None
        try:
            (fd, tmparchive) = tempfile.mkstemp(prefix="osc_build_cpio")
            (fd, tmpfile) = tempfile.mkstemp(prefix="osc_build")
            url = makeurl(apiurl, ["build", project, repo, arch, package], query=query)
            sys.stdout.write("preparing download ...\r")
            sys.stdout.flush()
            self.gr.urlgrab(url, filename=tmparchive,
                            text="fetching packages for '%s'" % project)
            archive = cpio.CpioRead(tmparchive)
            archive.read()
            for hdr in archive:
                # XXX: we won't have an .errors file because we're using
                # getbinarylist instead of the public/... route (which is
                # routed to getbinaries (but that won't work for kiwi products))
                if hdr.filename == ".errors":
                    archive.copyin_file(hdr.filename)
                    raise oscerr.APIError("CPIO archive is incomplete (see .errors file)")
                if package == "_repository":
                    # map Arch Linux .pkg.tar.?z names back to the pkgs key
                    n = re.sub(r"\.pkg\.tar\..z$", ".arch", hdr.filename)
                    pac = pkgs[n.rsplit(".", 1)[0]]
                else:
                    # this is a kiwi product
                    pac = pkgs[hdr.filename]
                archive.copyin_file(hdr.filename, os.path.dirname(tmpfile),
                                    os.path.basename(tmpfile))
                self.move_package(tmpfile, pac.localdir, pac)
            # check if we got all packages... (because we've no .errors file)
            for pac in pkgs.itervalues():
                if not os.path.isfile(pac.fullfilename):
                    raise oscerr.APIError(
                        "failed to fetch file '%s': "
                        "does not exist in CPIO archive" % pac.repofilename
                    )
        finally:
            # fix idiom: 'x is not None' instead of 'not x is None'
            if tmparchive is not None and os.path.exists(tmparchive):
                os.unlink(tmparchive)
            if tmpfile is not None and os.path.exists(tmpfile):
                os.unlink(tmpfile)
def run(self, buildinfo): cached = 0 all = len(buildinfo.deps) for i in buildinfo.deps: i.makeurls(self.cachedir, self.urllist) if os.path.exists(i.fullfilename): cached += 1 miss = 0 needed = all - cached if all: miss = 100.0 * needed / all print "%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all) done = 1 for i in buildinfo.deps: i.makeurls(self.cachedir, self.urllist) if not os.path.exists(i.fullfilename): if self.offline: raise oscerr.OscIOError(None, 'Missing package \'%s\' in cache: --offline not possible.' % i.fullfilename) self.dirSetup(i) try: # if there isn't a progress bar, there is no output at all if not self.progress_obj: print '%d/%d (%s) %s' % (done, needed, i.project, i.filename) self.fetch(i) if self.progress_obj: print " %d/%d\r" % (done, needed), sys.stdout.flush() except KeyboardInterrupt: print 'Cancelled by user (ctrl-c)' print 'Exiting.' sys.exit(0) done += 1 self.__fetch_cpio(buildinfo.apiurl) prjs = buildinfo.projects.keys() for i in prjs: dest = "%s/%s" % (self.cachedir, i) if not os.path.exists(dest): os.makedirs(dest, mode=0755) dest += '/_pubkey' url = makeurl(buildinfo.apiurl, ['source', i, '_pubkey']) try: if self.offline and not os.path.exists(dest): # may need to try parent raise URLGrabError(2) elif not self.offline: OscFileGrabber().urlgrab(url, dest) if not i in buildinfo.prjkeys: # not that many keys usually buildinfo.keys.append(dest) buildinfo.prjkeys.append(i) except KeyboardInterrupt: print 'Cancelled by user (ctrl-c)' print 'Exiting.' if os.path.exists(dest): os.unlink(dest) sys.exit(0) except URLGrabError, e: if self.http_debug: print >>sys.stderr, "can't fetch key for %s: %s" %(i, e.strerror) print >>sys.stderr, "url: %s" % url if os.path.exists(dest): os.unlink(dest) l = i.rsplit(':', 1) # try key from parent project if len(l) > 1 and l[1] and not l[0] in buildinfo.projects: prjs.append(l[0])
def run(self, buildinfo): cached = 0 all = len(buildinfo.deps) for i in buildinfo.deps: i.makeurls(self.cachedir, self.urllist) if os.path.exists(i.fullfilename): cached += 1 miss = 0 needed = all - cached if all: miss = 100.0 * needed / all print "%.1f%% cache miss. %d/%d dependencies cached.\n" % (miss, cached, all) done = 1 for i in buildinfo.deps: i.makeurls(self.cachedir, self.urllist) if not os.path.exists(i.fullfilename): if self.offline: raise oscerr.OscIOError( None, 'Missing package \'%s\' in cache: --offline not possible.' % i.fullfilename) self.dirSetup(i) try: # if there isn't a progress bar, there is no output at all if not self.progress_obj: print '%d/%d (%s) %s' % (done, needed, i.project, i.filename) self.fetch(i) if self.progress_obj: print " %d/%d\r" % (done, needed), sys.stdout.flush() except KeyboardInterrupt: print 'Cancelled by user (ctrl-c)' print 'Exiting.' sys.exit(0) done += 1 self.__fetch_cpio(buildinfo.apiurl) prjs = buildinfo.projects.keys() for i in prjs: dest = "%s/%s" % (self.cachedir, i) if not os.path.exists(dest): os.makedirs(dest, mode=0755) dest += '/_pubkey' url = makeurl(buildinfo.apiurl, ['source', i, '_pubkey']) try: if self.offline and not os.path.exists(dest): # may need to try parent raise URLGrabError(2) elif not self.offline: OscFileGrabber().urlgrab(url, dest) if not i in buildinfo.prjkeys: # not that many keys usually buildinfo.keys.append(dest) buildinfo.prjkeys.append(i) except KeyboardInterrupt: print 'Cancelled by user (ctrl-c)' print 'Exiting.' if os.path.exists(dest): os.unlink(dest) sys.exit(0) except URLGrabError, e: if self.http_debug: print >> sys.stderr, "can't fetch key for %s: %s" % ( i, e.strerror) print >> sys.stderr, "url: %s" % url if os.path.exists(dest): os.unlink(dest) l = i.rsplit(':', 1) # try key from parent project if len(l) > 1 and l[1] and not l[0] in buildinfo.projects: prjs.append(l[0])