def go(self, d, urls=None):
    """Fetch each git url in *urls* (default: self.urls) into DL_DIR.

    For every url: ensure a bare-ish clone exists under ${GITDIR} (restoring
    it from a stashed mirror tarball if available), update it, tarball the
    .git directory, then check out the requested tag and tarball the checkout
    to the path returned by self.localpath().
    """
    # Fix: the original used a mutable default argument (urls=[]), which is
    # shared between calls; use None as the sentinel instead.
    if not urls:
        urls = self.urls
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
        tag = gettag(parm)
        proto = getprotocol(parm)
        # Flatten host+path into a single cache key, e.g. git_host.a.b.tar.gz
        gitsrcname = '%s%s' % (host, path.replace('/', '.'))
        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
        coname = '%s' % (tag)
        codir = os.path.join(repodir, coname)
        cofile = self.localpath(loc, d)
        # tag=="master" must always update
        # Fix: the original called an undefined name localfile(loc, d); use
        # the basename of the already-computed checkout tarball path, which
        # is the DL_DIR-relative name try_mirror expects.
        if (tag != "master") and Fetch.try_mirror(d, os.path.basename(cofile)):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % cofile)
            continue
        if not os.path.exists(repodir):
            # No clone yet: prefer unpacking a mirrored repo tarball to cloning.
            if Fetch.try_mirror(d, repofilename):
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                rungitcmd("tar -xzf %s" % (repofile), d)
            else:
                rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir), d)
        os.chdir(repodir)
        # Refresh branches and tags, then compact the object store.
        rungitcmd("git pull %s://%s%s" % (proto, host, path), d)
        rungitcmd("git pull --tags %s://%s%s" % (proto, host, path), d)
        rungitcmd("git prune-packed", d)
        rungitcmd("git pack-redundant --all | xargs -r rm", d)
        # Remove all but the .git directory
        rungitcmd("rm * -Rf", d)
        # old method of downloading tags
        #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
        os.chdir(repodir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)
        # Materialize the requested tag in a fresh per-tag directory.
        if os.path.exists(codir):
            prunedir(codir)
        bb.mkdirhier(codir)
        os.chdir(repodir)
        rungitcmd("git read-tree %s" % (tag), d)
        rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*")), d)
def go(self, loc, ud, d):
    """Fetch url

    Ensures a clone of the repository exists under ${GITDIR} (restoring it
    from a mirror tarball when possible), fetches the wanted branch/tags if
    the requested revision is missing, optionally regenerates the mirror
    tarball, then checks out ud.tag and tarballs the checkout to ud.localpath.
    """
    # A previously stashed checkout tarball makes all the work unnecessary.
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return

    # Optional user component for the clone/fetch URLs.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    # Flatten host+path into a single cache key shared by tarball and clone dir.
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)

    if not os.path.exists(repodir):
        # Prefer unpacking a mirrored repository tarball over a fresh clone.
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

    os.chdir(repodir)
    # Only hit the network when the wanted revision isn't already present.
    if not self._contains_ref(ud.tag, d):
        # Remove all but the .git directory
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
        # Compact the object store after fetching.
        runfetchcmd("git prune-packed", d)
        runfetchcmd("git pack-redundant --all | xargs -r rm", d)

    os.chdir(repodir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0":
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

    # Build a clean per-tag checkout directory.
    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    bb.mkdirhier(codir)
    os.chdir(repodir)
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)
    # The per-tag checkout is only scratch space once the tarball exists.
    os.chdir(repodir)
    bb.utils.prunedir(codir)
def go(self, loc, ud, d):
    """Fetch url

    Ensures a clone of the repository exists under ${GITDIR} (restoring it
    from a mirror tarball when possible), unconditionally pulls branches and
    tags, regenerates the mirror tarball, then checks out ud.tag and
    tarballs the checkout to ud.localpath.
    """
    # A previously stashed checkout tarball makes all the work unnecessary.
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return

    # Flatten host+path into a single cache key shared by tarball and clone dir.
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)

    if not os.path.exists(repodir):
        # Prefer unpacking a mirrored repository tarball over a fresh clone.
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d)

    os.chdir(repodir)
    # Remove all but the .git directory
    runfetchcmd("rm * -Rf", d)
    # Unconditionally refresh branches and tags, then compact the store.
    runfetchcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path), d)
    runfetchcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d)
    runfetchcmd("git prune-packed", d)
    runfetchcmd("git pack-redundant --all | xargs -r rm", d)
    # old method of downloading tags
    #runfetchcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")), d)

    os.chdir(repodir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
    runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

    # Build a clean per-tag checkout directory.
    if os.path.exists(codir):
        prunedir(codir)

    bb.mkdirhier(codir)
    os.chdir(repodir)
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)
    # The per-tag checkout is only scratch space once the tarball exists.
    os.chdir(repodir)
    prunedir(codir)
def localpath(url, d):
    """Return the local DL_DIR tarball path for an svn url.

    Honours an explicit 'localpath' override; requires a 'module' parameter.
    The filename encodes module, host, path, revision and source date
    (the date is dropped when a fixed revision is given).
    """
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # An explicit localpath parameter short-circuits everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    module = parm["module"]
    revision = parm.get("rev", "")
    # A srcdate parameter overrides the configured SRCDATE; a pinned
    # revision makes the date irrelevant.
    date = parm.get("srcdate", Fetch.getSRCDate(d))
    if revision:
        date = ""
    tarname = "%s_%s_%s_%s_%s.tar.gz" % (
        module.replace("/", "."), host, path.replace("/", "."), revision, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarname, d))
def localpath(self, url, ud, d):
    """Set up mercurial checkout paths/revision on ud and return the
    local DL_DIR tarball path.

    Requires a 'module' URL parameter; an explicit 'rev' parameter wins,
    otherwise the revision comes from srcrev_internal_helper or, failing
    that, the latest upstream revision.
    """
    if "module" not in ud.parm:
        raise MissingParameterError("hg method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    # Checkouts live under ${HGDIR}/<host>/<path>/<module>.
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        wanted = Fetch.srcrev_internal_helper(ud, d)
        # True means "use autorev"; any other truthy value is a concrete
        # revision; anything falsy also resolves to the latest revision.
        if wanted is True or not wanted:
            ud.revision = self.latest_revision(url, ud, d)
        else:
            ud.revision = wanted

    ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (
        ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Set up git fetch parameters on ud and return the local DL_DIR
    tarball path.

    Chooses a transport (explicit 'protocol' parameter, 'file' for local
    paths, rsync otherwise), resolves the branch/tag, and names the
    checkout tarball after host, path and resolved tag.
    """
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif ud.host:
        ud.proto = "rsync"
    else:
        # No host component means a local repository.
        ud.proto = 'file'

    ud.branch = ud.parm.get("branch", "master")

    wanted = Fetch.srcrev_internal_helper(ud, d)
    if wanted is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif wanted:
        ud.tag = wanted
    else:
        ud.tag = ""
    # "master" is a moving target, so resolve it (or an empty tag) to a
    # concrete revision.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (
        ud.host, ud.path.replace('/', '.'), ud.tag), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Set up osc checkout paths/revision on ud and return the local
    DL_DIR tarball path.

    Requires a 'module' URL parameter; an explicit 'rev' parameter wins,
    otherwise srcrev_internal_helper is consulted (empty revision when it
    yields nothing concrete).
    """
    if "module" not in ud.parm:
        raise MissingParameterError("osc method needs a 'module' parameter.")
    ud.module = ud.parm["module"]

    # Create paths to osc checkouts
    relpath = ud.path
    if relpath.startswith('/'):
        # Remove leading slash as os.path.join can't cope
        relpath = relpath[1:]
    ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
    ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        # Fix: dropped a dead 'pv = data.getVar("PV", d, 0)' lookup the
        # original performed and never used.
        rev = Fetch.srcrev_internal_helper(ud, d)
        if rev and rev != True:
            ud.revision = rev
        else:
            ud.revision = ""

    ud.localfile = data.expand('%s_%s_%s.tar.gz' % (
        ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping bzr checkout." % ud.localpath) return if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): bzrcmd = self._buildbzrcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc) os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) runfetchcmd(bzrcmd, d) else: os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir))) bzrcmd = self._buildbzrcommand(ud, d, "fetch") bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc) bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd) runfetchcmd(bzrcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def localpath(self, url, ud, d):
    """Set up osc checkout paths/revision on ud and return the local
    DL_DIR tarball path.

    Requires a 'module' URL parameter; an explicit 'rev' parameter wins,
    otherwise srcrev_internal_helper is consulted (empty revision when it
    yields nothing concrete).
    """
    if "module" not in ud.parm:
        raise MissingParameterError("osc method needs a 'module' parameter.")
    ud.module = ud.parm["module"]

    # Create paths to osc checkouts
    relpath = ud.path
    if relpath.startswith('/'):
        # Remove leading slash as os.path.join can't cope
        relpath = relpath[1:]
    ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
    ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        # Fix: dropped a dead 'pv = data.getVar("PV", d, 0)' lookup the
        # original performed and never used.
        rev = Fetch.srcrev_internal_helper(ud, d)
        if rev and rev != True:
            ud.revision = rev
        else:
            ud.revision = ""

    ud.localfile = data.expand('%s_%s_%s.tar.gz' % (
        ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(url, d):
    """Return the local DL_DIR tarball path for a cvs url.

    Honours an explicit 'localpath' override; requires a 'module'
    parameter. The filename encodes module, host, tag and date; the date
    defaults to the configured SRCDATE only when no tag is given.
    """
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # An explicit localpath parameter short-circuits everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("cvs method needs a 'module' parameter")
    module = parm["module"]
    tag = parm.get("tag", "")
    if "date" in parm:
        date = parm["date"]
    elif tag:
        # A tag pins the sources, so no date is needed.
        date = ""
    else:
        date = Fetch.getSRCDate(d)
    tarname = "%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarname, d))
def localpath(self, url, ud, d):
    """Set up git fetch parameters on ud and return the local DL_DIR
    tarball path.

    Transport defaults to rsync unless overridden by a 'protocol'
    parameter; the tag comes from srcrev_internal_helper and is resolved
    to a concrete revision when empty or "master".
    """
    ud.proto = "rsync"
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    ud.branch = ud.parm.get("branch", "master")

    tag = Fetch.srcrev_internal_helper(ud, d)
    if tag is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif tag:
        ud.tag = tag
    else:
        # Fix: the original left ud.tag unset on this path, so the check
        # below could raise AttributeError; the sibling implementation of
        # this method initialises it to "" here.
        ud.tag = ""
    # An empty tag or the moving "master" target resolves to the latest
    # concrete revision (the original performed these as two separate,
    # redundant checks).
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (
        ud.host, ud.path.replace('/', '.'), ud.tag), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d):
    """Fetch urls

    Checks the module out of svk into a fresh temporary directory, then
    tarballs it to ud.localpath. Raises FetchError on checkout or tar
    failure (cleaning up the temp dir / partial tarball first).
    """
    # Skip when not forced and a stashed tarball already exists.
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        return

    svkroot = ud.host + ud.path
    # Fix: the original referenced an undefined name 'date' (the "pyflakes
    # claims date is not known" comment was accurate) — use ud.date, the
    # SRCDATE-derived attribute. The {%s} braces are svn/svk date syntax.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
    if ud.revision:
        # Fix: the original format string was "svk co -r %s/%s" — two
        # placeholders for three arguments, a guaranteed TypeError.
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # Don't leave a truncated tarball behind.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def go(self, loc, ud, d):
    """Fetch urls

    Checks the module out of svk into a fresh temporary directory, then
    tarballs it to ud.localpath. Raises FetchError on checkout or tar
    failure (cleaning up the temp dir / partial tarball first).
    """
    # Skip when not forced and a stashed tarball already exists.
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        return

    svkroot = ud.host + ud.path
    # Fix: the original referenced an undefined name 'date' (the "pyflakes
    # claims date is not known" comment was accurate) — use ud.date, the
    # SRCDATE-derived attribute. The {%s} braces are svn/svk date syntax.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
    if ud.revision:
        # Fix: the original format string was "svk co -r %s/%s" — two
        # placeholders for three arguments, a guaranteed TypeError.
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # Don't leave a truncated tarball behind.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath) return bb.msg.debug( 2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) os.chdir(ud.pkgdir) try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def localpath(self, url, ud, d):
    """Set up svn checkout paths, revision and date on ud and return the
    local DL_DIR tarball path.

    Requires a 'module' URL parameter. Precedence: explicit 'rev'
    parameter, explicit 'date' parameter, then SRCREV machinery (with a
    legacy DATE-in-PV escape hatch).
    """
    if "module" not in ud.parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    # Create paths to svn checkouts
    relpath = ud.path
    if relpath.startswith("/"):
        # Remove leading slash as os.path.join can't cope
        relpath = relpath[1:]
    ud.pkgdir = os.path.join(data.expand("${SVNDIR}", d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if "rev" in ud.parm:
        ud.date = ""
        ud.revision = ud.parm["rev"]
    elif "date" in ud.parm:
        # Fix: the original tested '"date" in ud.date' — a substring check
        # against the SRCDATE string — so this branch could never trigger
        # as intended; the value read is clearly the URL parameter.
        ud.date = ud.parm["date"]
        ud.revision = ""
    else:
        #
        # ***Nasty hack***
        # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
        # Should warn people to switch to SRCREV here
        #
        # Robustness: guard against an unset PV (getVar returns None).
        pv = data.getVar("PV", d, 0) or ""
        if "DATE" in pv:
            ud.revision = ""
        else:
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev is True:
                ud.revision = self.latest_revision(url, ud, d)
                ud.date = ""
            elif rev:
                ud.revision = rev
                ud.date = ""
            else:
                ud.revision = ""

    ud.localfile = data.expand(
        "%s_%s_%s_%s_%s.tar.gz" % (ud.module.replace("/", "."), ud.host,
                                   ud.path.replace("/", "."), ud.revision, ud.date), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d): """ Fetch url """ # Try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping osc checkout." % ud.localpath) return bb.msg.debug( 2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access( os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): oscupdatecmd = self._buildosccommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd) runfetchcmd(oscupdatecmd, d) else: oscfetchcmd = self._buildosccommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd) runfetchcmd(oscfetchcmd, d) os.chdir(os.path.join(ud.pkgdir + ud.path)) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def localpath(self, url, ud, d):
    """Set up git fetch parameters (transport, branch, tag, clone dir,
    mirror tarball name) on ud and return the local DL_DIR tarball path.

    'fullclone' makes the mirror tarball itself the local file; 'noclone'
    means no local file at all (returns None). An optional 'subpath'
    parameter narrows the checkout and is encoded into the tarball name.
    """
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif ud.host:
        ud.proto = "rsync"
    else:
        # No host component means a local repository.
        ud.proto = 'file'

    ud.branch = ud.parm.get("branch", "master")

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    ud.mirrortarball = 'git_%s.tar.gz' % gitsrcname
    ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    wanted = Fetch.srcrev_internal_helper(ud, d)
    if wanted is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif wanted:
        ud.tag = wanted
    # "master" is a moving target; resolve it (or an empty tag) to a
    # concrete revision.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    subpath = ud.parm.get("subpath", "")
    subdirpath = ud.path
    if subpath:
        # Strip a single trailing slash before joining.
        if subpath[-1] == "/":
            subpath = subpath[:-1]
        subdirpath = os.path.join(ud.path, subpath)

    if 'fullclone' in ud.parm:
        ud.localfile = ud.mirrortarball
    else:
        ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (
            ud.host, subdirpath.replace('/', '.'), ud.tag), d)

    ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

    if 'noclone' in ud.parm:
        ud.localfile = None
        return None
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def sortable_revision(self, url, ud, d):
    """Return a sortable "<count>+<revision>" string for this url.

    The count is a monotonically increasing integer persisted in the
    BB_URI_LOCALCOUNT domain, bumped each time the latest revision
    changes; entries stored under the old (branch-less) key format are
    migrated to the new key on first access.
    """
    pd = bb.persist_data.persist(d)
    localcounts = pd['BB_URI_LOCALCOUNT']
    # New key includes the branch; oldkey is the legacy branch-less form.
    key = self.generate_revision_key(url, ud, d, branch=True)
    oldkey = self.generate_revision_key(url, ud, d, branch=False)

    latest_rev = self._build_revision(url, ud, d)
    last_rev = localcounts[key + '_rev']
    if last_rev is None:
        # Migrate a legacy entry to the branch-qualified key.
        last_rev = localcounts[oldkey + '_rev']
        if last_rev is not None:
            del localcounts[oldkey + '_rev']
            localcounts[key + '_rev'] = last_rev

    uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
    count = None
    if uselocalcount:
        # Recipe-provided count overrides the persisted one.
        count = Fetch.localcount_internal_helper(ud, d)
    if count is None:
        count = localcounts[key + '_count']
    if count is None:
        # Migrate a legacy count entry as well.
        count = localcounts[oldkey + '_count']
        if count is not None:
            del localcounts[oldkey + '_count']
            localcounts[key + '_count'] = count

    if last_rev == latest_rev:
        # Revision unchanged: reuse the stored count (a string here).
        return str(count + "+" + latest_rev)

    # Revision changed: derive the next count value.
    buildindex_provided = hasattr(self, "_sortable_buildindex")
    if buildindex_provided:
        count = self._sortable_buildindex(url, ud, d, latest_rev)
    if count is None:
        count = "0"
    elif uselocalcount or buildindex_provided:
        count = str(count)
    else:
        count = str(int(count) + 1)

    localcounts[key + '_rev'] = latest_rev
    localcounts[key + '_count'] = count

    return str(count + "+" + latest_rev)
def sortable_revision(self, url, ud, d):
    """Return a sortable "<count>+<revision>" string for this url.

    The count is a monotonically increasing integer persisted in the
    BB_URI_LOCALCOUNT domain, bumped each time the latest revision
    changes; entries stored under the old (branch-less) key format are
    migrated to the new key on first access.
    """
    localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d)
    # New key includes the branch; oldkey is the legacy branch-less form.
    key = self.generate_revision_key(url, ud, d, branch=True)
    oldkey = self.generate_revision_key(url, ud, d, branch=False)

    latest_rev = self._build_revision(url, ud, d)
    last_rev = localcounts[key + '_rev']
    if last_rev is None:
        # Migrate a legacy entry to the branch-qualified key.
        last_rev = localcounts[oldkey + '_rev']
        if last_rev is not None:
            del localcounts[oldkey + '_rev']
            localcounts[key + '_rev'] = last_rev

    uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
    count = None
    if uselocalcount:
        # Recipe-provided count overrides the persisted one.
        count = Fetch.localcount_internal_helper(ud, d)
    if count is None:
        count = localcounts[key + '_count']
    if count is None:
        # Migrate a legacy count entry as well.
        count = localcounts[oldkey + '_count']
        if count is not None:
            del localcounts[oldkey + '_count']
            localcounts[key + '_count'] = count

    if last_rev == latest_rev:
        # Revision unchanged: reuse the stored count (a string here).
        return str(count + "+" + latest_rev)

    # Revision changed: derive the next count value.
    buildindex_provided = hasattr(self, "_sortable_buildindex")
    if buildindex_provided:
        count = self._sortable_buildindex(url, ud, d, latest_rev)
    if count is None:
        count = "0"
    elif uselocalcount or buildindex_provided:
        count = str(count)
    else:
        count = str(int(count) + 1)

    localcounts[key + '_rev'] = latest_rev
    localcounts[key + '_count'] = count

    return str(count + "+" + latest_rev)
def localpath(self, url, ud, d):
    """Set up bzr checkout paths/revision on ud and return the local
    DL_DIR tarball path.

    The revision comes from srcrev_internal_helper; an empty result is
    resolved to the latest upstream revision.
    """
    # Checkouts live under ${BZRDIR}/<host>/<path>.
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

    rev = Fetch.srcrev_internal_helper(ud, d)
    if rev is True:
        # True means "use autorev".
        ud.revision = self.latest_revision(url, ud, d)
    elif rev:
        ud.revision = rev
    if not ud.revision:
        ud.revision = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (
        ud.host, ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(url, d):
    """Return the local DL_DIR tarball path for an svk url.

    Honours an explicit 'localpath' override; requires a 'module'
    parameter. The filename encodes module, host, path, revision and the
    configured source date.
    """
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # An explicit localpath parameter short-circuits everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("svk method needs a 'module' parameter")
    module = parm["module"]
    revision = parm.get('rev', "")
    date = Fetch.getSRCDate(d)
    tarname = '%s_%s_%s_%s_%s.tar.gz' % (
        module.replace('/', '.'), host, path.replace('/', '.'), revision, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarname, d))
def localpath(self, url, ud, d):
    """Set up git fetch parameters (transport, branch, tag, clone dir,
    mirror tarball name) on ud and return the local DL_DIR tarball path.

    'fullclone' makes the mirror tarball itself the local file; 'noclone'
    means no local file at all (returns None). An optional 'subpath'
    parameter narrows the checkout and is encoded into the tarball name.
    """
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif ud.host:
        ud.proto = "rsync"
    else:
        # No host component means a local repository.
        ud.proto = 'file'

    ud.branch = ud.parm.get("branch", "master")

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    ud.mirrortarball = 'git_%s.tar.gz' % gitsrcname
    ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    wanted = Fetch.srcrev_internal_helper(ud, d)
    if wanted is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif wanted:
        ud.tag = wanted
    # "master" is a moving target; resolve it (or an empty tag) to a
    # concrete revision.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    subpath = ud.parm.get("subpath", "")
    subdirpath = ud.path
    if subpath:
        # Strip a single trailing slash before joining.
        if subpath[-1] == "/":
            subpath = subpath[:-1]
        subdirpath = os.path.join(ud.path, subpath)

    if 'fullclone' in ud.parm:
        ud.localfile = ud.mirrortarball
    else:
        ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (
            ud.host, subdirpath.replace('/', '.'), ud.tag), d)

    ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

    if 'noclone' in ud.parm:
        ud.localfile = None
        return None
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Set up svn checkout paths, revision and date on ud and return the
    local DL_DIR tarball path.

    Requires a 'module' URL parameter. Precedence: explicit 'rev'
    parameter, explicit 'date' parameter, then SRCREV machinery (with a
    legacy DATE-in-PV escape hatch).
    """
    if "module" not in ud.parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    # Create paths to svn checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if 'rev' in ud.parm:
        ud.date = ""
        ud.revision = ud.parm['rev']
    elif 'date' in ud.parm:
        # Fix: the original tested "'date' in ud.date" — a substring check
        # against the SRCDATE string — so this branch could never trigger
        # as intended; the value read is clearly the URL parameter.
        ud.date = ud.parm['date']
        ud.revision = ""
    else:
        #
        # ***Nasty hack***
        # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
        # Should warn people to switch to SRCREV here
        #
        # Robustness: guard against an unset PV (getVar returns None).
        pv = data.getVar("PV", d, 0) or ""
        if "DATE" in pv:
            ud.revision = ""
        else:
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev is True:
                ud.revision = self.latest_revision(url, ud, d)
                ud.date = ""
            elif rev:
                ud.revision = rev
                ud.date = ""
            else:
                ud.revision = ""

    ud.localfile = data.expand(
        '%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host,
                                   ud.path.replace('/', '.'), ud.revision, ud.date), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Set up git fetch parameters on ud and return the local DL_DIR
    tarball path, optionally namespaced by 'repopath'/'module' URL
    parameters which are folded into the tarball name.
    """
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif ud.host:
        ud.proto = "rsync"
    else:
        # No host component means a local repository.
        ud.proto = 'file'

    ud.branch = ud.parm.get("branch", "master")

    wanted = Fetch.srcrev_internal_helper(ud, d)
    if wanted is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif wanted:
        ud.tag = wanted
    # "master" is a moving target; resolve it (or an empty tag) to a
    # concrete revision.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    # NOTE(review): when no 'module' parameter is present, any 'repopath'
    # value is discarded below (subdir forced to "") — preserved as-is,
    # but confirm this is intentional.
    repopath = ud.parm.get('repopath', "")
    ud.subdir = repopath
    if 'module' in ud.parm:
        if ud.subdir:
            ud.subdir = os.path.join(ud.subdir, ud.parm['module'])
        else:
            ud.subdir = ud.parm['module']
    else:
        ud.subdir = ""

    if ud.subdir:
        tagdir = "%s_%s" % (ud.subdir, ud.tag)
    else:
        tagdir = ud.tag

    ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (
        ud.host, ud.path.replace('/', '.'), tagdir.replace('/', '.')), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath) return bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) os.chdir(ud.pkgdir) try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def localpath(self, url, ud, d):
    """Set up bzr checkout paths/revision on ud and return the local
    DL_DIR tarball path.

    The revision comes from srcrev_internal_helper; an empty result is
    resolved to the latest upstream revision.
    """
    # Checkouts live under ${BZRDIR}/<host>/<path>.
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

    rev = Fetch.srcrev_internal_helper(ud, d)
    if rev is True:
        # True means "use autorev".
        ud.revision = self.latest_revision(url, ud, d)
    elif rev:
        ud.revision = rev
    if not ud.revision:
        ud.revision = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (
        ud.host, ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Set up svn checkout paths, revision and date on ud and return the
    local DL_DIR tarball path.

    Requires a 'module' URL parameter. Precedence: explicit 'rev'
    parameter, explicit 'date' parameter, then SRCREV machinery (with a
    legacy DATE-in-PV escape hatch).
    """
    if "module" not in ud.parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    # Create paths to svn checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if 'rev' in ud.parm:
        ud.date = ""
        ud.revision = ud.parm['rev']
    elif 'date' in ud.parm:
        # Fix: the original tested "'date' in ud.date" — a substring check
        # against the SRCDATE string — so this branch could never trigger
        # as intended; the value read is clearly the URL parameter.
        ud.date = ud.parm['date']
        ud.revision = ""
    else:
        #
        # ***Nasty hack***
        # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
        # Should warn people to switch to SRCREV here
        #
        # Robustness: guard against an unset PV (getVar returns None).
        pv = data.getVar("PV", d, 0) or ""
        if "DATE" in pv:
            ud.revision = ""
        else:
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev is True:
                ud.revision = self.latest_revision(url, ud, d)
                ud.date = ""
            elif rev:
                ud.revision = rev
                ud.date = ""
            else:
                ud.revision = ""

    ud.localfile = data.expand(
        '%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host,
                                   ud.path.replace('/', '.'), ud.revision, ud.date), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath ) return bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, ".svn"), os.R_OK): svnupdatecmd = self._buildsvncommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd) runfetchcmd(svnupdatecmd, d) else: svnfetchcmd = self._buildsvncommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd) runfetchcmd(svnfetchcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, d, urls = []):
    """Fetch urls

    Legacy svn fetcher: for each url, checks the module out of svn into a
    temporary directory under ${WORKDIR} and tarballs it into DL_DIR.
    Skips urls whose tarball already exists (or is mirrored).
    """
    if not urls:
        urls = self.urls

    # Run with svn-specific OVERRIDES applied to a copy of the datastore.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svn method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)
        # if local path contains the svn
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)

        # setup svn options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""

        date = Fetch.getSRCDate(d)

        if "proto" in parm:
            proto = parm["proto"]
        else:
            proto = "svn"

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in parm:
            svn_rsh = parm["rsh"]

        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        # try to use the tarball stash
        if Fetch.check_for_tarball(d, tarfn, dldir, date):
            bb.debug(1, "%s already exists or was mirrored, skipping svn checkout." % tarfn)
            continue

        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        svnroot = host + path

        data.setVar('SVNROOT', svnroot, localdata)
        data.setVar('SVNCOOPTS', " ".join(options), localdata)
        data.setVar('SVNMODULE', module, localdata)
        # NOTE(review): this FETCHCOMMAND lookup is immediately overwritten
        # by the hand-built command below — appears to be dead code; confirm.
        svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
        svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)

        # either use the revision or if SRCDATE is now no braces
        if revision:
            svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
        elif date == "now":
            svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)

        if svn_rsh:
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

        # create temp directory
        bb.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        # check out sources there
        os.chdir(tmpfile)
        bb.note("Fetch " + loc)
        bb.debug(1, "Running %s" % svncmd)
        myret = os.system(svncmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
        if myret != 0:
            # NOTE(review): unlink uses the bare tarfn (relative to the
            # current directory) while the tarball was written to
            # dldir/tarfn — likely leaves the partial tarball behind; confirm.
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        # cleanup
        os.system('rm -rf %s' % tmpfile)
        os.chdir(olddir)
    del localdata
def go(self, loc, ud, d):
    """Fetch a git url.

    Obtains (or reuses) a bare clone in ud.clonedir — via mirror tarball if
    available — updates it when the wanted ref is missing, optionally writes
    a mirror tarball, then checks the tree out and tars it to ud.localpath.

    loc -- the url being fetched
    ud  -- FetchData for the url (clonedir, tag, branch, parm, ...)
    d   -- bitbake datastore
    """

    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

    coname = '%s' % (ud.tag)

    codir = os.path.join(ud.clonedir, coname)

    # If we have no existing clone and no mirror tarball, try and obtain one
    if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
        try:
            Fetch.try_mirrors(ud.mirrortarball)
        except Exception:
            # Mirror fetch is best-effort (we fall back to cloning below),
            # but a bare except also swallowed SystemExit/KeyboardInterrupt;
            # narrowed to Exception.
            pass

    # If the checkout doesn't exist and the mirror tarball does, extract it
    if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
        bb.mkdirhier(ud.clonedir)
        os.chdir(ud.clonedir)
        runfetchcmd("tar -xzf %s" % (repofile), d)

    # If the repo still doesn't exist, fallback to cloning it
    if not os.path.exists(ud.clonedir):
        runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Update the checkout if needed
    if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
        # Remove all but the .git directory
        runfetchcmd("rm * -Rf", d)
        if 'fullclone' in ud.parm:
            runfetchcmd("%s fetch --all" % (ud.basecmd), d)
        else:
            runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    # Generate a mirror tarball if needed
    os.chdir(ud.clonedir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # A "subpath" parameter restricts the checkout to a subdirectory of
    # the repository.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    # scmdata=keep preserves the .git metadata in the checkout tarball.
    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
        os.chdir(coprefix)
        runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    else:
        bb.mkdirhier(codir)
        os.chdir(ud.clonedir)
        runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
        runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    os.chdir(codir)
    logger.info("Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)
def go(self, d, urls = None):
    """Fetch cvs urls.

    For each url: skip if the target tarball already exists or is mirrored,
    otherwise check out / update the module under ${CVSDIR}/${PN} and tar it
    into DL_DIR.

    d    -- bitbake datastore
    urls -- list of cvs urls; defaults to self.urls

    Raises MissingParameterError if a url lacks a 'module' parameter and
    FetchError when the checkout/update fails.
    """
    # None default avoids the shared-mutable-default pitfall; behavior is
    # unchanged since an empty list also fell back to self.urls.
    if not urls:
        urls = self.urls

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)

        # setup cvs options
        options = []
        if 'tag' in parm:
            tag = parm['tag']
        else:
            tag = ""

        if 'date' in parm:
            date = parm['date']
        else:
            # only pin a date when no tag was given
            if not tag:
                date = Fetch.getSRCDate(d)
            else:
                date = ""

        if "method" in parm:
            method = parm["method"]
        else:
            method = "pserver"

        if "localdir" in parm:
            localdir = parm["localdir"]
        else:
            localdir = module

        cvs_rsh = None
        if method == "ext":
            if "rsh" in parm:
                cvs_rsh = parm["rsh"]

        tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
            continue

        # try to use the tarball stash
        if Fetch.try_mirror(d, tarfn):
            continue

        if date:
            options.append("-D %s" % date)
        if tag:
            options.append("-r %s" % tag)

        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        # setup cvsroot
        if method == "dir":
            cvsroot = path
        else:
            cvsroot = ":" + method + ":" + user
            if pswd:
                cvsroot += ":" + pswd
            cvsroot += "@" + host + ":" + path

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        bb.debug(2, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            bb.note("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.note("Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.debug(1, "Running %s" % cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(module)

        os.chdir(moddir)
        os.chdir('..')
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(moddir)))
        if myret != 0:
            # Fix: the tarball is written into dldir while cwd is the
            # module's parent, so unlink the absolute path; the old
            # relative unlink(tarfn) always failed silently.
            try:
                os.unlink(os.path.join(dldir, tarfn))
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, d, urls=None):
    """Fetch cvs urls.

    Variant that also consults CVS_TARBALL_STASH (optionally per-PN) via
    wget before falling back to a cvs checkout/update under ${CVSDIR}/${PN};
    the result is tarred into DL_DIR.

    d    -- bitbake datastore
    urls -- list of cvs urls; defaults to self.urls

    Raises MissingParameterError if a url lacks a 'module' parameter and
    FetchError when the checkout fails.
    """
    # None default avoids the shared-mutable-default pitfall; behavior is
    # unchanged since an empty list also fell back to self.urls.
    if not urls:
        urls = self.urls

    localdata = data.createCopy(d)
    data.setVar("OVERRIDES", "cvs:%s" % data.getVar("OVERRIDES", localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar("DL_DIR", localdata, 1)

        # setup cvs options
        options = []
        if "tag" in parm:
            tag = parm["tag"]
        else:
            tag = ""

        if "date" in parm:
            date = parm["date"]
        else:
            # only pin a date when no tag was given
            if not tag:
                date = Fetch.getSRCDate(d)
            else:
                date = ""

        if "method" in parm:
            method = parm["method"]
        else:
            method = "pserver"

        if "localdir" in parm:
            localdir = parm["localdir"]
        else:
            localdir = module

        cvs_rsh = None
        if method == "ext":
            if "rsh" in parm:
                cvs_rsh = parm["rsh"]

        tarfn = data.expand("%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date), localdata)
        data.setVar("TARFILES", dlfile, localdata)
        data.setVar("TARFN", tarfn, localdata)

        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
            continue

        # try to fetch a previously stashed tarball (per-PN stash wins)
        pn = data.getVar("PN", d, 1)
        cvs_tarball_stash = None
        if pn:
            cvs_tarball_stash = data.getVar("CVS_TARBALL_STASH_%s" % pn, d, 1)
        if cvs_tarball_stash is None:
            cvs_tarball_stash = data.getVar("CVS_TARBALL_STASH", d, 1)
        if cvs_tarball_stash:
            fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1)
            uri = cvs_tarball_stash + tarfn
            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
                continue

        if date:
            options.append("-D %s" % date)
        if tag:
            options.append("-r %s" % tag)

        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        # setup cvsroot
        if method == "dir":
            cvsroot = path
        else:
            cvsroot = ":" + method + ":" + user
            if pswd:
                cvsroot += ":" + pswd
            cvsroot += "@" + host + ":" + path

        data.setVar("CVSROOT", cvsroot, localdata)
        data.setVar("CVSCOOPTS", " ".join(options), localdata)
        data.setVar("CVSMODULE", module, localdata)
        cvscmd = data.getVar("FETCHCOMMAND", localdata, 1)
        cvsupdatecmd = data.getVar("UPDATECOMMAND", localdata, 1)

        if cvs_rsh:
            cvscmd = 'CVS_RSH="%s" %s' % (cvs_rsh, cvscmd)
            cvsupdatecmd = 'CVS_RSH="%s" %s' % (cvs_rsh, cvsupdatecmd)

        # create module directory
        bb.debug(2, "Fetch: checking for module directory")
        pkg = data.expand("${PN}", d)
        pkgdir = os.path.join(data.expand("${CVSDIR}", localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, "CVS"), os.R_OK):
            bb.note("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.note("Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.debug(1, "Running %s" % cvscmd)
            myret = os.system(cvscmd)

        if myret != 0:
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(module)

        os.chdir(moddir)
        os.chdir("..")
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(moddir)))
        if myret != 0:
            # Fix: the tarball is written into dldir while cwd is the
            # module's parent, so unlink the absolute path; the old
            # relative unlink(tarfn) always failed silently.
            try:
                os.unlink(os.path.join(dldir, tarfn))
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, loc, ud, d):
    """Fetch a git url.

    Maintains a shared repository under ${GITDIR}, protected by a lock file
    since a single repodir can serve multiple checkouts; checks the wanted
    tag (optionally a subdir of it) out into a per-checkout directory and
    tars it to ud.localpath, recording the rev-list count alongside.

    loc -- the url being fetched
    ud  -- FetchData for the url (host, path, tag, branch, subdir, ...)
    d   -- bitbake datastore
    """

    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return

    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))

    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    coname = '%s' % (ud.tag)
    if ud.subdir != "":
        coname = '%s_%s' % (coname, ud.subdir.replace('/', '.'))

    codir = os.path.join(repodir, coname)

    # A single repodir can be used for multiple checkouts.
    # Protect against corruption.
    lf = bb.utils.lockfile("%s.%s" % (repofile, '.lock'))
    # Fix: release the lock even when a fetch command raises; previously a
    # failure anywhere below leaked the lock file.
    try:
        if not os.path.exists(repodir):
            if Fetch.try_mirror(d, repofilename):
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                runfetchcmd("tar -xzf %s" % (repofile), d)
            else:
                runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

        os.chdir(repodir)
        # Remove all but the .git directory
        if not self._contains_ref(ud.tag, d):
            runfetchcmd("rm * -Rf", d)
            runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("git prune-packed", d)
            runfetchcmd("git pack-redundant --all | xargs -r rm", d)

        os.chdir(repodir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0":
            bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        if ud.subdir != "":
            readpathspec = ":%s" % (ud.subdir)
            subdir = os.path.basename(ud.subdir)
        else:
            readpathspec = ""
            subdir = "git"

        bb.mkdirhier(codir)
        os.chdir(repodir)
        runfetchcmd("git read-tree %s%s" % (ud.tag, readpathspec), d)
        runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, subdir, "")), d)

        count = runfetchcmd("git rev-list %s -- | wc -l" % (ud.tag), d, True)
    finally:
        bb.utils.unlockfile(lf)

    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Checkins count: %s" % count)
    # with-statement guarantees the file is closed even on a write error
    with open(os.path.join(subdir, '.git_revision_count'), 'w') as f:
        f.write(count)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)
    os.chdir(repodir)
    bb.utils.prunedir(codir)
def go(self, loc, ud, d):
    """Fetch a cvs url.

    Checks out / updates the module under ${CVSDIR}/${PN} and tars the
    result to ud.localpath. Raises FetchError when the cvs command or the
    tar step fails.

    loc -- the url being fetched
    ud  -- FetchData for the url (module, tag, date, parm, ...)
    d   -- bitbake datastore
    """

    # try to use the tarball stash
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
        return

    method = "pserver"
    if "method" in ud.parm:
        method = ud.parm["method"]

    localdir = ud.module
    if "localdir" in ud.parm:
        localdir = ud.parm["localdir"]

    cvs_port = ""
    if "port" in ud.parm:
        cvs_port = ud.parm["port"]

    # CVS_RSH is only meaningful for the "ext" access method
    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    # Build the CVSROOT string; "dir" means a local repository path,
    # otherwise :method[;proxy=..][;proxyport=..]:user[:pswd]@host:[port]path
    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []
    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    # An existing CVS admin directory means we can update in place
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
        # update sources there
        os.chdir(moddir)
        myret = os.system(cvsupdatecmd)
    else:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
        myret = os.system(cvscmd)

    if myret != 0 or not os.access(moddir, os.R_OK):
        # remove the (empty) partial checkout dir before failing
        try:
            os.rmdir(moddir)
        except OSError:
            pass
        raise FetchError(ud.module)

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        # 'fullpath' keeps the module's directory structure in the tarball
        os.chdir(pkgdir)
        myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
    else:
        os.chdir(moddir)
        os.chdir('..')
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))

    if myret != 0:
        # remove a partially-written tarball before failing
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
def go(self, loc, ud, d):
    """Fetch a git url.

    Obtains (or reuses) a bare clone in ud.clonedir — extracting a mirror
    tarball when one can be fetched, otherwise cloning — updates it when the
    wanted ref is missing, optionally writes a mirror tarball, then checks
    the tree out and tars it to ud.localpath.

    loc -- the url being fetched
    ud  -- FetchData for the url (clonedir, tag, branch, parm, ...)
    d   -- bitbake datastore
    """

    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

    coname = '%s' % (ud.tag)

    codir = os.path.join(ud.clonedir, coname)

    if not os.path.exists(ud.clonedir):
        try:
            # Mirror-tarball path is best-effort; any failure falls back to
            # a plain clone.
            Fetch.try_mirrors(ud.mirrortarball)
            bb.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        except Exception:
            # Fix: the previous bare except also caught SystemExit and
            # KeyboardInterrupt, making the fetcher uninterruptible here.
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    os.chdir(ud.clonedir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # A "subpath" parameter restricts the checkout to a subdirectory of
    # the repository.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    bb.mkdirhier(codir)
    os.chdir(ud.clonedir)
    runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)
def go(self, loc, ud, d):
    """
    Fetch a perforce url: list the files at the resolved depot revision,
    print each one into a temporary module directory, then tar the module
    to ud.localpath. Raises FetchError on any failure.
    """

    # try to use the tarball stash
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
        return

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    # strip a trailing wildcard ("/...") to get the plain depot path
    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    if "module" in parm:
        module = parm["module"]
    else:
        module = os.path.basename(path)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command; credentials/host are passed as p4 options
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

    # create temp directory
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(module)

    # pin the depot spec to a label or a resolved changeset
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot))
    p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

    # NOTE(review): os.popen returns a file object even on command failure,
    # so this check can never fire — confirm the intended failure detection.
    if not p4file:
        bb.error("Fetch: unable to get the P4 files from %s" % (depot))
        raise FetchError(module)

    count = 0

    for file in p4file:
        list = file.split()

        # skip files marked deleted in this revision
        if list[2] == "delete":
            continue

        # dest is the depot-relative path; strip the "#rev" suffix
        dest = list[0][len(path) + 1:]
        where = dest.find("#")
        # NOTE(review): if no '#' is present, where == -1 and dest[:where]
        # silently drops the last character — confirm p4 always emits '#rev'.

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
        count = count + 1

    if count == 0:
        bb.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError(module)

    myret = os.system("tar -czf %s %s" % (ud.localpath, module))
    if myret != 0:
        # remove a partially-written tarball before failing
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def go(self, d, urls = None):
    """Fetch svn urls.

    For each url: skip if a tarball is already stashed, otherwise check out
    or update the module under ${SVNDIR}/${PN} and tar it into DL_DIR.

    d    -- bitbake datastore
    urls -- list of svn urls; defaults to self.urls

    Raises MissingParameterError if a url lacks a 'module' parameter and
    FetchError when the svn command fails.
    """
    # None default avoids the shared-mutable-default pitfall; behavior is
    # unchanged since an empty list also fell back to self.urls.
    if not urls:
        urls = self.urls

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svn method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)

        # setup svn options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""

        date = Fetch.getSRCDate(d)

        if "proto" in parm:
            proto = parm["proto"]
        else:
            proto = "svn"

        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in parm:
            svn_rsh = parm["rsh"]

        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        # try to use the tarball stash
        if Fetch.check_for_tarball(d, tarfn, dldir, date):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % tarfn)
            continue

        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        svnroot = host + path

        # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
        if revision:
            options.append("-r %s" % revision)
        elif date != "now":
            options.append("-r {%s}" % date)

        data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, module), localdata)
        data.setVar('SVNCOOPTS', " ".join(options), localdata)
        data.setVar('SVNMODULE', module, localdata)
        svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
        svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if svn_rsh:
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
            svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)

        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, module)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")

        # An existing .svn admin dir means we can update in place
        if os.access(os.path.join(moddir, '.svn'), os.R_OK):
            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
            # update sources there
            os.chdir(moddir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
            myret = os.system(svnupcmd)
        else:
            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
            myret = os.system(svncmd)

        if myret != 0:
            raise FetchError(module)

        os.chdir(pkgdir)
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(module)))
        if myret != 0:
            # Fix: the tarball is written into dldir while cwd is pkgdir,
            # so unlink the absolute path; the old relative unlink(tarfn)
            # always failed silently.
            try:
                os.unlink(os.path.join(dldir, tarfn))
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, loc, ud, d):
    """
    Fetch a perforce url: list the files at the resolved depot revision,
    print each one into a temporary module directory, then tar the module
    to ud.localpath. Credentials/host are passed via P4USER/P4PASSWD/P4PORT
    datastore variables. Raises FetchError on any failure.
    """

    # try to use the tarball stash
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
        return

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    # strip a trailing wildcard ("/...") to get the plain depot path
    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    if "module" in parm:
        module = parm["module"]
    else:
        module = os.path.basename(path)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command; FETCHCOMMAND is expected to consume these vars
    if user:
        data.setVar('P4USER', user, localdata)

    if pswd:
        data.setVar('P4PASSWD', pswd, localdata)

    if host:
        data.setVar('P4PORT', host, localdata)

    p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

    # create temp directory
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(module)

    # pin the depot spec to a label or a resolved changeset
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot))
    p4file = os.popen("%s files %s" % (p4cmd, depot))

    # NOTE(review): os.popen returns a file object even on command failure,
    # so this check can never fire — confirm the intended failure detection.
    if not p4file:
        bb.error("Fetch: unable to get the P4 files from %s" % (depot))
        raise FetchError(module)

    count = 0

    for file in p4file:
        list = file.split()

        # skip files marked deleted in this revision
        if list[2] == "delete":
            continue

        # dest is the depot-relative path; strip the "#rev" suffix
        dest = list[0][len(path)+1:]
        where = dest.find("#")
        # NOTE(review): if no '#' is present, where == -1 and dest[:where]
        # silently drops the last character — confirm p4 always emits '#rev'.

        os.system("%s print -o %s/%s %s" % (p4cmd, module, dest[:where], list[0]))
        count = count + 1

    if count == 0:
        bb.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError(module)

    myret = os.system("tar -czf %s %s" % (ud.localpath, module))
    if myret != 0:
        # remove a partially-written tarball before failing
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def go(self, loc, ud, d):
    """Fetch a svn url.

    Checks out / updates the module under ${SVNDIR}/${PN} and tars the
    result to ud.localpath. Raises FetchError when the svn command or the
    tar step fails.

    loc -- the url being fetched
    ud  -- FetchData for the url (module, revision, date, parm, ...)
    d   -- bitbake datastore
    """

    # try to use the tarball stash
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
        return

    proto = "svn"
    if "proto" in ud.parm:
        proto = ud.parm["proto"]

    # rsh wrapper is only meaningful for svn+ssh urls
    svn_rsh = None
    if proto == "svn+ssh" and "rsh" in ud.parm:
        svn_rsh = ud.parm["rsh"]

    svnroot = ud.host + ud.path

    # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
    options = []
    if ud.revision:
        options.append("-r %s" % ud.revision)
    elif ud.date != "now":
        options.append("-r {%s}" % ud.date)

    if ud.user:
        options.append("--username %s" % ud.user)

    if ud.pswd:
        options.append("--password %s" % ud.pswd)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # FETCHCOMMAND/UPDATECOMMAND are templates that consume these variables
    data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, ud.module), localdata)
    data.setVar('SVNCOOPTS', " ".join(options), localdata)
    data.setVar('SVNMODULE', ud.module, localdata)
    svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
    svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if svn_rsh:
        svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
        svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)

    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${SVNDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, ud.module)
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")

    # An existing .svn admin dir means we can update in place
    if os.access(os.path.join(moddir, '.svn'), os.R_OK):
        bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
        # update sources there
        os.chdir(moddir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
        myret = os.system(svnupcmd)
    else:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
        myret = os.system(svncmd)

    if myret != 0:
        raise FetchError(ud.module)

    os.chdir(pkgdir)
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # remove a partially-written tarball before failing
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
def go(self, d, urls = None):
    """Fetch svk urls.

    For each url: skip if the target tarball already exists, otherwise check
    the module out of svk into a temporary directory and tar it into DL_DIR.

    d    -- bitbake datastore
    urls -- list of svk urls; defaults to self.urls

    Raises MissingParameterError if a url lacks a 'module' parameter and
    FetchError when the checkout fails.
    """
    # None default avoids the shared-mutable-default pitfall; behavior is
    # unchanged since an empty list also fell back to self.urls.
    if not urls:
        urls = self.urls

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)

        # setup svk options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""

        date = Fetch.getSRCDate(d)
        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping svk checkout." % tarfn)
            continue

        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        svkroot = host + path

        data.setVar('SVKROOT', svkroot, localdata)
        data.setVar('SVKCOOPTS', " ".join(options), localdata)
        data.setVar('SVKMODULE', module, localdata)
        svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)

        if revision:
            # Fix: the format string previously had only two placeholders
            # ("svk co -r %s/%s") for three arguments, which raised a
            # TypeError whenever a 'rev' parameter was supplied.
            svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)

        # create temp directory
        bb.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        # check out sources there
        os.chdir(tmpfile)
        bb.note("Fetch " + loc)
        bb.debug(1, "Running %s" % svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(module)))
        if myret != 0:
            # Fix: the tarball lives in dldir, not the current (temp)
            # directory, so unlink the absolute path; the old relative
            # unlink(tarfn) always failed silently.
            try:
                os.unlink(os.path.join(dldir, tarfn))
            except OSError:
                pass
        # cleanup
        os.system('rm -rf %s' % tmpfile)
        os.chdir(olddir)
    del localdata
def go(self, loc, ud, d):
    """Fetch a git url.

    Obtains (or reuses) a bare clone in ud.clonedir — extracting a mirror
    tarball when one can be fetched, otherwise cloning — updates it when the
    wanted ref is missing, optionally writes a mirror tarball, then checks
    the tree out and tars it to ud.localpath.

    loc -- the url being fetched
    ud  -- FetchData for the url (clonedir, tag, branch, parm, ...)
    d   -- bitbake datastore
    """

    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

    coname = '%s' % (ud.tag)

    codir = os.path.join(ud.clonedir, coname)

    if not os.path.exists(ud.clonedir):
        try:
            # Mirror-tarball path is best-effort; any failure falls back to
            # a plain clone.
            Fetch.try_mirrors(ud.mirrortarball)
            bb.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        except Exception:
            # Fix: the previous bare except also caught SystemExit and
            # KeyboardInterrupt, making the fetcher uninterruptible here.
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    os.chdir(ud.clonedir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # A "subpath" parameter restricts the checkout to a subdirectory of
    # the repository.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    bb.mkdirhier(codir)
    os.chdir(ud.clonedir)
    runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)