def fetch_uri(uri, ud, d):
    """Fetch a single URI via the configured external command.

    uri: the URL to fetch (any ';'-separated parameters are stripped).
    ud:  FetchData object for this URL (provides localpath/basename).
    d:   the BitBake datastore.
    Returns True on apparent success, False when the command claimed
    success but the local file does not exist.
    """
    # 'checkonly' comes from enclosing scope — presumably a module/closure
    # flag selecting a no-download existence check; TODO confirm.
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]
    # Substitute placeholders expected by the configured command templates.
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    logger.info("fetch " + uri)
    logger.debug(2, "executing " + fetchcmd)
    runfetchcmd(fetchcmd, d)
    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
        return False
    return True
def fetch_uri(uri, ud, d):
    """Fetch a single URI via the configured external command (variant).

    uri: the URL to fetch (any ';'-separated parameters are stripped).
    ud:  FetchData object for this URL (provides localpath/basename).
    d:   the BitBake datastore.
    Returns True on apparent success, False when the command claimed
    success but the local file does not exist.
    """
    # 'checkonly' comes from enclosing scope — presumably a module/closure
    # flag selecting a no-download existence check; TODO confirm.
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]
    # Substitute placeholders expected by the configured command templates.
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    logger.info("fetch " + uri)
    logger.debug(2, "executing " + fetchcmd)
    runfetchcmd(fetchcmd, d)
    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
        return False
    return True
def _sortable_buildindex_disabled(self, url, ud, d, rev):
    """
    Return a suitable buildindex for the revision specified. This is done
    by counting revisions using "git rev-list" which may or may not work
    in different circumstances.
    """
    cwd = os.getcwd()
    # Check if we have the rev already
    if not os.path.exists(ud.clonedir):
        print("no repo")
        self.go(None, ud, d)
        if not os.path.exists(ud.clonedir):
            logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
            return None
    os.chdir(ud.clonedir)
    # Re-fetch if the requested revision is not present locally.
    if not self._contains_ref(rev, d):
        self.go(None, ud, d)
    output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
    os.chdir(cwd)
    buildindex = "%s" % output.split()[0]
    logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
    return buildindex
def _latest_revision(self, url, ud, d):
    """
    Compute the HEAD revision for the url
    """
    # First whitespace-separated token of `git ls-remote` output is the SHA.
    output = runfetchcmd("git ls-remote %s://%s%s %s" % (ud.proto, ud.host, ud.path, ud.branch), d, True)
    return output.split()[0]
def _contains_ref(self, tag, d):
    """Return True if the local clone already contains the given ref.

    Counts `git log -n 1 <tag>` output lines; "0" means the ref is absent.
    """
    basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
    output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
    return output.split()[0] != "0"
def _latest_revision(self, url, ud, d):
    """
    Return the latest upstream revision number
    """
    bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url)
    # `bzr revno` prints a single revision number.
    output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
    return output.strip()
def _latest_revision(self, url, ud, d):
    """
    Compute the HEAD revision for the url
    """
    # Include "user@" in the remote URL when credentials were given.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""
    output = runfetchcmd("git ls-remote %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d, True)
    return output.split()[0]
def go(self, loc, ud, d):
    """Fetch url

    repo (Android multi-git) fetcher: init/sync the repo checkout and
    tar it up into ud.localpath. Skips work when the tarball exists.
    """
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
        return
    gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
    repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
    codir = os.path.join(repodir, gitsrcname, ud.manifest)
    if ud.user:
        username = ud.user + "@"
    else:
        username = ""
    bb.mkdirhier(os.path.join(codir, "repo"))
    os.chdir(os.path.join(codir, "repo"))
    if not os.path.exists(os.path.join(codir, "repo", ".repo")):
        runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
    runfetchcmd("repo sync", d)
    os.chdir(codir)
    # scmdata=keep retains .repo/.git metadata in the tarball.
    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.repo' --exclude '.git'"
    # Create a cache
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
def go(self, loc, ud, d): """Fetch url""" logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): svnupdatecmd = self._buildsvncommand(ud, d, "update") logger.info("Update " + loc) # update sources there os.chdir(ud.moddir) logger.debug(1, "Running %s", svnupdatecmd) runfetchcmd(svnupdatecmd, d) else: svnfetchcmd = self._buildsvncommand(ud, d, "fetch") logger.info("Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", svnfetchcmd) runfetchcmd(svnfetchcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar --exclude '.svn' -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url

    repo fetcher variant using bb.utils.mkdirhier; init/sync the repo
    checkout and tar it up into ud.localpath.
    """
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
        return
    gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
    repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
    codir = os.path.join(repodir, gitsrcname, ud.manifest)
    if ud.user:
        username = ud.user + "@"
    else:
        username = ""
    bb.utils.mkdirhier(os.path.join(codir, "repo"))
    os.chdir(os.path.join(codir, "repo"))
    if not os.path.exists(os.path.join(codir, "repo", ".repo")):
        runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
    runfetchcmd("repo sync", d)
    os.chdir(codir)
    # scmdata=keep retains .repo/.git metadata in the tarball.
    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.repo' --exclude '.git'"
    # Create a cache
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
def go(self, loc, ud, d):
    """Fetch url

    repo fetcher variant using the old bb.msg logging API.
    """
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping repo init / sync." % ud.localpath)
        return
    gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
    repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
    codir = os.path.join(repodir, gitsrcname, ud.manifest)
    if ud.user:
        username = ud.user + "@"
    else:
        username = ""
    bb.mkdirhier(os.path.join(codir, "repo"))
    os.chdir(os.path.join(codir, "repo"))
    if not os.path.exists(os.path.join(codir, "repo", ".repo")):
        runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
    runfetchcmd("repo sync", d)
    os.chdir(codir)
    # Create a cache
    runfetchcmd("tar --exclude=.repo --exclude=.git -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
def go(self, loc, ud, d): """Fetch url""" bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): svnupdatecmd = self._buildsvncommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd) runfetchcmd(svnupdatecmd, d) else: svnfetchcmd = self._buildsvncommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd) runfetchcmd(svnfetchcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """ Fetch url """ logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): oscupdatecmd = self._buildosccommand(ud, d, "update") logger.info("Update "+ loc) # update sources there os.chdir(ud.moddir) logger.debug(1, "Running %s", oscupdatecmd) runfetchcmd(oscupdatecmd, d) else: oscfetchcmd = self._buildosccommand(ud, d, "fetch") logger.info("Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", oscfetchcmd) runfetchcmd(oscfetchcmd, d) os.chdir(os.path.join(ud.pkgdir + ud.path)) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): bzrcmd = self._buildbzrcommand(ud, d, "update") logger.debug(1, "BZR Update %s", loc) os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) runfetchcmd(bzrcmd, d) else: bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) bzrcmd = self._buildbzrcommand(ud, d, "fetch") logger.debug(1, "BZR Checkout %s", loc) bb.utils.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", bzrcmd) runfetchcmd(bzrcmd, d) os.chdir(ud.pkgdir) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" # tar them up to a defined filename try: runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): bzrcmd = self._buildbzrcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc) os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path))) runfetchcmd(bzrcmd, d) else: os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir))) bzrcmd = self._buildbzrcommand(ud, d, "fetch") bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc) bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd) runfetchcmd(bzrcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """ Fetch url """ bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): oscupdatecmd = self._buildosccommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd) runfetchcmd(oscupdatecmd, d) else: oscfetchcmd = self._buildosccommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd) runfetchcmd(oscfetchcmd, d) os.chdir(os.path.join(ud.pkgdir + ud.path)) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath) return bb.msg.debug( 2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) os.chdir(ud.pkgdir) try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def _latest_revision(self, url, ud, d):
    """
    Return the latest upstream revision number
    """
    bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
    # Force C locale so "Last Changed Rev" matches regardless of host language.
    output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
    revision = None
    for line in output.splitlines():
        if "Last Changed Rev" in line:
            revision = line.split(":")[1].strip()
    return revision
def go(self, loc, ud, d): """Fetch url""" logger.debug( 2, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") logger.info("Update " + loc) # update sources there os.chdir(ud.moddir) logger.debug(1, "Running %s", updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") logger.info("Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") os.chdir(ud.moddir) logger.debug(1, "Running %s", updatecmd) runfetchcmd(updatecmd, d) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude '.hg' --exclude '.hgrags'" os.chdir(ud.pkgdir) try: runfetchcmd( "tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def _latest_revision(self, url, ud, d):
    """
    Compute the HEAD revision for the url
    """
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""
    basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
    cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
    output = runfetchcmd(cmd, d, True)
    # Empty output means the remote/branch could not be resolved.
    if not output:
        raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
    return output.split()[0]
def _latest_revision(self, url, ud, d):
    """
    Return the latest upstream revision number
    """
    bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
    # Force C locale so the log format is stable for parsing.
    output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log"), d, True)
    revision = None
    for line in output.splitlines():
        # Revision headers look like "r1234 | author | ...".
        revision = re.findall(r'^r(\d+)', line)
        if len(revision) >= 1:
            return revision[0]
    return revision
def go(self, loc, ud, d): """Fetch url""" logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") logger.info("Update " + loc) # update sources there os.chdir(ud.moddir) logger.debug(1, "Running %s", updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") logger.info("Fetch " + loc) # check out sources there bb.utils.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") os.chdir(ud.moddir) logger.debug(1, "Running %s", updatecmd) runfetchcmd(updatecmd, d) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude '.hg' --exclude '.hgrags'" os.chdir(ud.pkgdir) try: runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath) return bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK): updatecmd = self._buildhgcommand(ud, d, "pull") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) else: fetchcmd = self._buildhgcommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd) runfetchcmd(fetchcmd, d) # Even when we clone (fetch), we still need to update as hg's clone # won't checkout the specified revision if its on a branch updatecmd = self._buildhgcommand(ud, d, "update") bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd) runfetchcmd(updatecmd, d) os.chdir(ud.pkgdir) try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def _sortable_revision(self, url, ud, d):
    """
    This is only called when _want_sortable_revision called true
    We will have to get the updated revision.
    """
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    # Cache the computed value in the datastore to avoid repeated rev-list runs.
    key = "GIT_CACHED_REVISION-%s-%s" % (gitsrcname, ud.tag)
    if bb.data.getVar(key, d):
        return bb.data.getVar(key, d)
    # Runtime warning on wrongly configured sources
    if ud.tag == "1":
        bb.msg.error(1, bb.msg.domain.Fetcher, "SRCREV is '1'. This indicates a configuration error of %s" % url)
        return "0+1"
    cwd = os.getcwd()
    # Check if we have the rev already
    if not os.path.exists(repodir):
        print("no repo")
        self.go(None, ud, d)
    os.chdir(repodir)
    if not self._contains_ref(ud.tag, d):
        self.go(None, ud, d)
    output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % ud.tag, d, quiet=True)
    os.chdir(cwd)
    sortable_revision = "%s+%s" % (output.split()[0], ud.tag)
    bb.data.setVar(key, sortable_revision, d)
    return sortable_revision
def go(self, loc, ud, d): """ Fetch url """ # Try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping osc checkout." % ud.localpath) return bb.msg.debug( 2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access( os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK): oscupdatecmd = self._buildosccommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd) runfetchcmd(oscupdatecmd, d) else: oscfetchcmd = self._buildosccommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd) runfetchcmd(oscfetchcmd, d) os.chdir(os.path.join(ud.pkgdir + ud.path)) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def _sortable_revision(self, url, ud, d):
    """
    This is only called when _want_sortable_revision called true
    We will have to get the updated revision.
    """
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    # Cache the computed value in the datastore to avoid repeated rev-list runs.
    key = "GIT_CACHED_REVISION-%s-%s" % (gitsrcname, ud.tag)
    if bb.data.getVar(key, d):
        return bb.data.getVar(key, d)
    # Runtime warning on wrongly configured sources
    if ud.tag == "1":
        bb.msg.error(1, bb.msg.domain.Fetcher, "SRCREV is '1'. This indicates a configuration error of %s" % url)
        return "0+1"
    cwd = os.getcwd()
    # Check if we have the rev already
    if not os.path.exists(repodir):
        print("no repo")
        self.go(None, ud, d)
    os.chdir(repodir)
    if not self._contains_ref(ud.tag, d):
        self.go(None, ud, d)
    output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % ud.tag, d, quiet=True)
    os.chdir(cwd)
    sortable_revision = "%s+%s" % (output.split()[0], ud.tag)
    bb.data.setVar(key, sortable_revision, d)
    return sortable_revision
def go(self, loc, ud, d): """Fetch url""" logger.debug( 2, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK): svnupdatecmd = self._buildsvncommand(ud, d, "update") logger.info("Update " + loc) # update sources there os.chdir(ud.moddir) logger.debug(1, "Running %s", svnupdatecmd) runfetchcmd(svnupdatecmd, d) else: svnfetchcmd = self._buildsvncommand(ud, d, "fetch") logger.info("Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", svnfetchcmd) runfetchcmd(svnfetchcmd, d) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude '.svn'" os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd( "tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" if os.access( os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK): bzrcmd = self._buildbzrcommand(ud, d, "update") logger.debug(1, "BZR Update %s", loc) os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path))) runfetchcmd(bzrcmd, d) else: bb.utils.remove( os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True) bzrcmd = self._buildbzrcommand(ud, d, "fetch") logger.debug(1, "BZR Checkout %s", loc) bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) logger.debug(1, "Running %s", bzrcmd) runfetchcmd(bzrcmd, d) os.chdir(ud.pkgdir) scmdata = ud.parm.get("scmdata", "") if scmdata == "keep": tar_flags = "" else: tar_flags = "--exclude '.bzr' --exclude '.bzrtags'" # tar them up to a defined filename try: runfetchcmd( "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d): """Fetch url""" # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug( 1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath ) return bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'") if os.access(os.path.join(ud.moddir, ".svn"), os.R_OK): svnupdatecmd = self._buildsvncommand(ud, d, "update") bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc) # update sources there os.chdir(ud.moddir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd) runfetchcmd(svnupdatecmd, d) else: svnfetchcmd = self._buildsvncommand(ud, d, "fetch") bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) # check out sources there bb.mkdirhier(ud.pkgdir) os.chdir(ud.pkgdir) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd) runfetchcmd(svnfetchcmd, d) os.chdir(ud.pkgdir) # tar them up to a defined filename try: runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d) except: t, v, tb = sys.exc_info() try: os.unlink(ud.localpath) except OSError: pass raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url

    repo fetcher variant using the old bb.msg logging API.
    """
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping repo init / sync." % ud.localpath)
        return
    gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
    repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
    codir = os.path.join(repodir, gitsrcname, ud.manifest)
    if ud.user:
        username = ud.user + "@"
    else:
        username = ""
    bb.mkdirhier(os.path.join(codir, "repo"))
    os.chdir(os.path.join(codir, "repo"))
    if not os.path.exists(os.path.join(codir, "repo", ".repo")):
        runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
    runfetchcmd("repo sync", d)
    os.chdir(codir)
    # Create a cache
    runfetchcmd("tar --exclude=.repo --exclude=.git -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)
def go(self, loc, ud, d):
    """Fetch url

    Git fetcher: maintain a bare-ish clone under ${GITDIR}, fetch the
    wanted branch/tags when the ref is missing, optionally produce a
    mirror tarball of the repository, then export the tree for ud.tag
    into a checkout tarball at ud.localpath.
    """
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)
    if not os.path.exists(repodir):
        # Prefer unpacking a mirrored repo tarball over a network clone.
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)
    os.chdir(repodir)
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("git prune-packed", d)
        runfetchcmd("git pack-redundant --all | xargs -r rm", d)
    os.chdir(repodir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0":
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
    if os.path.exists(codir):
        bb.utils.prunedir(codir)
    bb.mkdirhier(codir)
    os.chdir(repodir)
    # Export the tree for ud.tag into codir/git without touching the index's HEAD.
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
    os.chdir(repodir)
    bb.utils.prunedir(codir)
def _contains_ref(self, tag, d):
    """Return True if the local clone already contains the given ref.

    Counts `git log -n 1 <tag>` output lines; "0" means the ref is absent.
    """
    output = runfetchcmd("git log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % tag, d, quiet=True)
    return output.split()[0] != "0"
def go(self, loc, ud, d):
    """Fetch url

    Git fetcher variant: same clone/fetch/export flow as the os.access
    variant, but the fast path checks the tarball mirror first.
    """
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)
    if not os.path.exists(repodir):
        # Prefer unpacking a mirrored repo tarball over a network clone.
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)
    os.chdir(repodir)
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("git prune-packed", d)
        runfetchcmd("git pack-redundant --all | xargs -r rm", d)
    os.chdir(repodir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0":
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
    if os.path.exists(codir):
        bb.utils.prunedir(codir)
    bb.mkdirhier(codir)
    os.chdir(repodir)
    # Export the tree for ud.tag into codir/git without touching the index's HEAD.
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
    os.chdir(repodir)
    bb.utils.prunedir(codir)
def go(self, loc, ud, d):
    """Fetch url

    Older git fetcher variant: unconditionally wipes the work tree and
    pulls (no _contains_ref check), no username support.
    """
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return
    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)
    if not os.path.exists(repodir):
        # Prefer unpacking a mirrored repo tarball over a network clone.
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s %s" % (ud.proto, ud.host, ud.path, repodir), d)
    os.chdir(repodir)
    # Remove all but the .git directory
    runfetchcmd("rm * -Rf", d)
    runfetchcmd("git pull %s://%s%s" % (ud.proto, ud.host, ud.path), d)
    runfetchcmd("git pull --tags %s://%s%s" % (ud.proto, ud.host, ud.path), d)
    runfetchcmd("git prune-packed", d)
    runfetchcmd("git pack-redundant --all | xargs -r rm", d)
    # old method of downloading tags
    #runfetchcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (ud.host, ud.path, os.path.join(repodir, ".git", "")), d)
    os.chdir(repodir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
    runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
    if os.path.exists(codir):
        prunedir(codir)
    bb.mkdirhier(codir)
    os.chdir(repodir)
    # Export the tree for ud.tag into codir/git without touching the index's HEAD.
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
    os.chdir(repodir)
    prunedir(codir)
def go(self, loc, ud, d):
    """Fetch url.

    Obtains/updates the shared clone in ud.clonedir (from a mirror tarball
    when possible, falling back to a fresh clone), optionally regenerates
    the mirror tarball, then exports ud.tag into a per-tag checkout
    directory and packages it as ud.localpath.
    """
    # Optional "user@" prefix for the remote URL.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
    coname = '%s' % (ud.tag)
    codir = os.path.join(ud.clonedir, coname)

    if not os.path.exists(ud.clonedir):
        # Best effort: try to seed the clone from a mirror tarball; any
        # failure falls back to a plain clone.  Narrowed from a bare
        # except so KeyboardInterrupt/SystemExit still propagate.
        try:
            Fetch.try_mirrors(ud.mirrortarball)
            bb.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        except Exception:
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Refresh only when the wanted ref is missing from the clone.
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    os.chdir(ud.clonedir)
    # Regenerate the mirror tarball when enabled, or when the user asked
    # for the full clone itself (fullclone), in which case we are done.
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # Optional subpath= parameter restricts the checkout to a subdirectory.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    # Export the tree for ud.tag into the checkout prefix.
    bb.mkdirhier(codir)
    os.chdir(ud.clonedir)
    runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    # Package the checkout as the final artifact, then clean up.
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)
def _contains_ref(self, tag, d):
    """Return True if the local repository already contains `tag`."""
    # FETCHCMD_git may override the git binary; default to plain "git".
    gitcmd = data.getVar("FETCHCMD_git", d, True) or "git"
    # Counting the lines of a one-commit log yields "1" when the ref
    # resolves and "0" (with errors discarded) when it does not.
    probe = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (gitcmd, tag)
    count = runfetchcmd(probe, d, quiet=True)
    return count.split()[0] != "0"
def go(self, loc, ud, d):
    """Fetch url.

    Obtains/updates the shared clone in ud.clonedir (mirror tarball first,
    then a fresh clone), optionally regenerates the mirror tarball, then
    exports ud.tag (honouring subpath= and scmdata=keep) into a per-tag
    checkout directory and packages it as ud.localpath.
    """
    # Optional "user@" prefix for the remote URL.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
    coname = '%s' % (ud.tag)
    codir = os.path.join(ud.clonedir, coname)

    # If we have no existing clone and no mirror tarball, try and obtain one
    if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
        # Deliberate best-effort: a failed mirror attempt just means we
        # clone below.  Narrowed from a bare except so
        # KeyboardInterrupt/SystemExit still propagate.
        try:
            Fetch.try_mirrors(ud.mirrortarball)
        except Exception:
            pass

    # If the checkout doesn't exist and the mirror tarball does, extract it
    if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
        bb.mkdirhier(ud.clonedir)
        os.chdir(ud.clonedir)
        runfetchcmd("tar -xzf %s" % (repofile), d)

    # If the repo still doesn't exist, fallback to cloning it
    if not os.path.exists(ud.clonedir):
        runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Update the checkout if needed
    if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
        # Remove all but the .git directory
        runfetchcmd("rm * -Rf", d)
        if 'fullclone' in ud.parm:
            runfetchcmd("%s fetch --all" % (ud.basecmd), d)
        else:
            runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    # Generate a mirror tarball if needed
    os.chdir(ud.clonedir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # Optional subpath= parameter restricts the checkout to a subdirectory.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    # scmdata=keep exports a real clone (with .git) instead of a bare tree.
    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
        os.chdir(coprefix)
        runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    else:
        bb.mkdirhier(codir)
        os.chdir(ud.clonedir)
        runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
        runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    # Package the checkout as the final artifact, then clean up.
    os.chdir(codir)
    logger.info("Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)
def _latest_revision(self, url, ud, d):
    """Return the first SHA reported by `git ls-remote` for the remote."""
    remote = "%s://%s%s" % (ud.proto, ud.host, ud.path)
    listing = runfetchcmd("git ls-remote %s" % remote, d, True)
    # First whitespace-separated token of the output is the revision hash.
    return listing.split()[0]
def go(self, loc, ud, d):
    """Fetch url.

    Obtains/updates the shared clone in ud.clonedir (mirror tarball first,
    cloning on failure), optionally regenerates the mirror tarball, then
    exports ud.tag (honouring subpath=) into a per-tag checkout directory
    and packages it as ud.localpath.
    """
    # Optional "user@" prefix for the remote URL.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
    coname = '%s' % (ud.tag)
    codir = os.path.join(ud.clonedir, coname)

    if not os.path.exists(ud.clonedir):
        # Best effort: seed the clone from a mirror tarball; on any failure
        # fall back to a fresh clone.  Narrowed from a bare except so
        # KeyboardInterrupt/SystemExit still propagate.
        try:
            Fetch.try_mirrors(ud.mirrortarball)
            bb.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        except Exception:
            runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)

    os.chdir(ud.clonedir)
    # Refresh only when the wanted ref is missing from the clone.
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("%s prune-packed" % ud.basecmd, d)
        runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)

    os.chdir(ud.clonedir)
    # Regenerate the mirror tarball when enabled or when fullclone is
    # requested; fullclone means the tarball itself is the artifact.
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0" or 'fullclone' in ud.parm:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

    if 'fullclone' in ud.parm:
        return

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # Optional subpath= parameter restricts the checkout to a subdirectory.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdirbase = os.path.basename(subdir[:-1])
        else:
            subdirbase = os.path.basename(subdir)
    else:
        subdirbase = ""

    if subdir != "":
        readpathspec = ":%s" % (subdir)
        codir = os.path.join(codir, "git")
        coprefix = os.path.join(codir, subdirbase, "")
    else:
        readpathspec = ""
        coprefix = os.path.join(codir, "git", "")

    # Export the tree for ud.tag into the checkout prefix.
    bb.mkdirhier(codir)
    os.chdir(ud.clonedir)
    runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
    runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)

    # Package the checkout as the final artifact, then clean up.
    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

    os.chdir(ud.clonedir)
    bb.utils.prunedir(codir)
def go(self, loc, ud, d):
    """Fetch url.

    Obtains/updates the shared clone under ${GITDIR} (serialized with a
    lock file, since one repodir serves multiple checkouts), optionally
    regenerates the mirror tarball, exports ud.tag (honouring ud.subdir),
    records the rev-list count in .git_revision_count, and packages the
    checkout as ud.localpath.
    """
    # Nothing to do if the packaged checkout already exists or was stashed.
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return

    # Optional "user@" prefix for the remote URL.
    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    # Per-checkout directory name; include the subdir so different subpaths
    # of the same repo get distinct checkout directories.
    coname = '%s' % (ud.tag)
    if ud.subdir != "":
        coname = '%s_%s' % (coname, ud.subdir.replace('/', '.'))
    codir = os.path.join(repodir, coname)

    # A single repodir can be used for multiple checkouts; take a lock to
    # protect against corruption.  (Was "%s.%s" % (repofile, '.lock'),
    # which produced a double dot in the lock file name.)
    lf = bb.utils.lockfile("%s.lock" % repofile)

    # Obtain the clone: prefer unpacking a mirrored tarball, else clone.
    if not os.path.exists(repodir):
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

    os.chdir(repodir)
    # Refresh only when the wanted ref is missing from the clone.
    # Remove all but the .git directory
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("git prune-packed", d)
        runfetchcmd("git pack-redundant --all | xargs -r rm", d)

    os.chdir(repodir)
    # Regenerate the mirror tarball unless explicitly disabled.
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0":
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    # ud.subdir restricts the checkout to a subdirectory of the tree.
    if ud.subdir != "":
        readpathspec = ":%s" % (ud.subdir)
        subdir = os.path.basename(ud.subdir)
    else:
        readpathspec = ""
        subdir = "git"

    # Export the tree for ud.tag into the checkout prefix.
    bb.mkdirhier(codir)
    os.chdir(repodir)
    runfetchcmd("git read-tree %s%s" % (ud.tag, readpathspec), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, subdir, "")), d)

    # Count the revisions reachable from the tag while the lock is held.
    count = runfetchcmd("git rev-list %s -- | wc -l" % (ud.tag), d, True)
    bb.utils.unlockfile(lf)

    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Checkins count: %s" % count)
    # Record the count inside the checkout; `with` guarantees the file is
    # closed even if the write fails (original leaked the handle on error).
    with open(os.path.join(subdir, '.git_revision_count'), 'w') as f:
        f.write(count)

    # Package the checkout as the final artifact, then clean up.
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*")), d)

    os.chdir(repodir)
    bb.utils.prunedir(codir)
def _latest_revision(self, url, ud, d):
    """
    Compute tip revision for the url
    """
    # Delegate command construction to the hg command builder, then strip
    # surrounding whitespace from whatever the command printed.
    infocmd = self._buildhgcommand(ud, d, "info")
    result = runfetchcmd(infocmd, d)
    return result.strip()