def _sortable_buildindex_disabled(self, url, ud, d, rev):
    """
    Return a suitable buildindex for the revision specified.

    This is done by counting revisions using "git rev-list" which
    may or may not work in different circumstances.

    Returns the revision count as a string, or None if the clone
    directory cannot be created even after a download attempt.
    """
    cwd = os.getcwd()

    # Check if we have the rev already; fetch the repository if not.
    if not os.path.exists(ud.clonedir):
        # Use the logger instead of a bare print() so the message goes
        # through bitbake's logging machinery like every other message
        # in this fetcher.
        logger.debug(1, "GIT repository for %s does not exist in %s. Downloading.",
                     url, ud.clonedir)
        self.download(None, ud, d)
        if not os.path.exists(ud.clonedir):
            logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value",
                         url, ud.clonedir)
            return None

    os.chdir(ud.clonedir)
    # The revision may still be missing from an existing clone; re-fetch.
    if not self._contains_ref(rev, d):
        self.download(None, ud, d)

    output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
    os.chdir(cwd)

    buildindex = "%s" % output.split()[0]
    logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s",
                 url, ud.clonedir, buildindex, rev)
    return buildindex
def unpack_submodules(ud, url, module, modpath, d):
    """
    Unpack a submodule's bare clone into the parent repository's
    modules directory and point the parent's submodule config at the
    locally downloaded copy.

    Raises whatever Fetch.unpack() or runfetchcmd() raise on failure,
    after logging the error.
    """
    url += ";bareclone=1;nobranch=1"

    # Figure out where we clone over the bare submodules...
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', modpath)))
    except Exception as e:
        logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
        raise

    local_path = newfetch.localpath(url)

    # Correct the submodule references to the local download version...
    runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_path}, d, workdir=ud.destdir)

    if ud.shallow:
        runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

    # Ensure the submodule repository is NOT set to bare, since we're checking it out...
    try:
        runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', modpath))
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are not intercepted just for logging; the error is re-raised
        # either way.
        logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', modpath))
        raise
def download(self, ud, d):
    """
    Fetch urls: check the module out of the svk depot at the requested
    revision (or date) into a temporary directory, tar it up into
    ud.localpath, then remove the temporary directory.
    """
    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    # An explicit revision overrides the date-based checkout.
    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup = [tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, loc, ud, d):
    """
    Fetch urls: check the module out of the svk depot at the requested
    revision (or date) into a temporary directory, tar it up into
    ud.localpath, then remove the temporary directory.

    loc is the URL string used for logging and error reporting.
    """
    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    # An explicit revision overrides the date-based checkout.
    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup = [tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def unpack_submodules(ud, url, module, modpath, d):
    """
    Unpack a submodule's bare clone into the parent repository's
    modules directory (keyed by submodule name) and point the parent's
    submodule config at the locally downloaded copy.

    Raises whatever Fetch.unpack() or runfetchcmd() raise on failure,
    after logging the error.
    """
    url += ";bareclone=1;nobranch=1"

    # Figure out where we clone over the bare submodules...
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    try:
        newfetch = Fetch([url], d, cache=False)
        newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
    except Exception as e:
        logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
        raise

    local_path = newfetch.localpath(url)

    # Correct the submodule references to the local download version...
    runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_path}, d, workdir=ud.destdir)

    if ud.shallow:
        runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

    # Ensure the submodule repository is NOT set to bare, since we're checking it out...
    try:
        runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', module))
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are not intercepted just for logging; the error is re-raised
        # either way.
        logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', module))
        raise
def _sortable_buildindex_disabled(self, url, ud, d, rev):
    """
    Return a suitable buildindex for the revision specified. This is done
    by counting revisions using "git rev-list" which may or may not work
    in different circumstances.

    Returns the revision count as a string, or None if the clone
    directory cannot be created even after a download attempt.
    """
    # Remember the caller's working directory; restored after rev-list.
    cwd = os.getcwd()

    # Check if we have the rev already; fetch the repository if missing.
    if not os.path.exists(ud.clonedir):
        logger.debug(1, "GIT repository for %s does not exist in %s. \
Downloading.", url, ud.clonedir)
        self.download(None, ud, d)
        if not os.path.exists(ud.clonedir):
            logger.error("GIT repository for %s does not exist in %s after \
download. Cannot get sortable buildnumber, using \
old value", url, ud.clonedir)
            return None

    os.chdir(ud.clonedir)
    # The clone may exist but still lack the requested revision; re-fetch.
    if not self._contains_ref(rev, d):
        self.download(None, ud, d)

    # Count revisions reachable from rev; stderr is discarded so a bad
    # rev simply yields an empty list (count 0).
    output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
    os.chdir(cwd)

    buildindex = "%s" % output.split()[0]
    logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
    return buildindex
def need_update_submodule(ud, url, module, modpath, workdir, d):
    """
    Check whether the submodule at modpath needs (re)fetching, recording
    its path in need_update_list when it does.

    NOTE(review): need_update_list and need_update_result are not defined
    in this function -- presumably it runs as a closure inside a caller
    that provides them; verify against the enclosing scope. As written,
    "need_update_result = True" binds only a function-local name (no
    nonlocal/global declaration), so it cannot affect any enclosing
    scope -- confirm this is intended.
    """
    url += ";bareclone=1;nobranch=1"

    try:
        newfetch = Fetch([url], d, cache=False)
        new_ud = newfetch.ud[url]
        # Delegate the staleness check to the submodule's own fetch method.
        if new_ud.method.need_update(new_ud, d):
            need_update_list.append(modpath)
    except Exception as e:
        # Best-effort: a failed check is treated as "needs update" rather
        # than aborting the whole submodule walk.
        logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
        need_update_result = True
def download_submodule(ud, url, module, modpath, d):
    """
    Download a single submodule as a bare clone via a nested Fetch.

    Any fetch failure is logged and then re-raised to the caller.
    """
    url += ";bareclone=1;nobranch=1"

    # Is the following still needed?
    #url += ";nocheckout=1"

    try:
        submodule_fetcher = Fetch([url], d, cache=False)
        submodule_fetcher.download()
    except Exception as e:
        logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
        raise
def download_submodule(ud, url, module, modpath, d):
    """
    Download a single submodule as a bare clone via a nested Fetch, then
    record the parent's srcrev in the clone's git config.

    Any failure is logged and then re-raised to the caller.
    """
    url += ";bareclone=1;nobranch=1"

    # Is the following still needed?
    #url += ";nocheckout=1"

    try:
        submodule_fetcher = Fetch([url], d, cache=False)
        submodule_fetcher.download()
        # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
        runfetchcmd(
            "%s config --add bitbake.srcrev %s" % (ud.basecmd, ud.revisions[ud.names[0]]),
            d, workdir=ud.clonedir)
    except Exception as e:
        logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
        raise
def download(self, ud, d):
    """
    Fetch urls: enumerate the files in the Perforce depot at the resolved
    label/changeset, print each file into a temporary directory, tar the
    result into ud.localpath, then remove the temporary directory.
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

    # Strip the recursive wildcard to get the depot path prefix.
    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    module = parm.get('module', os.path.basename(path))

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)
    if pswd:
        p4opt += " -P %s" % (pswd)
    if host:
        p4opt += " -p %s" % (host)
    p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(d.expand('${WORKDIR}'))
    mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
    tmpfile, errors = bb.process.run(mktemp)
    tmpfile = tmpfile.strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    # Pin the depot spec to a label if given, otherwise to a changeset.
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
    p4file = [f.rstrip() for f in p4file.splitlines()]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

    count = 0

    # Renamed from "file"/"list" to avoid shadowing Python builtins.
    for p4entry in p4file:
        fields = p4entry.split()

        if fields[2] == "delete":
            continue

        dest = fields[0][len(path)+1:]
        where = dest.find("#")

        subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
        count = count + 1

    if count == 0:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, ud, d):
    """
    Fetch urls: enumerate the files in the Perforce depot at the resolved
    label/changeset, print each file into a temporary directory, tar the
    result into ud.localpath, then remove the temporary directory.
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

    # Strip the recursive wildcard, or fall back to the parent directory
    # of a single-file depot spec, to get the path prefix.
    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot[:depot.rfind('/')]

    module = parm.get('module', os.path.basename(path))

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)
    if pswd:
        p4opt += " -P %s" % (pswd)
    if host:
        p4opt += " -p %s" % (host)
    p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(d.expand('${WORKDIR}'))
    mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
    tmpfile, errors = bb.process.run(mktemp)
    tmpfile = tmpfile.strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    # Pin the depot spec to a label if given, otherwise to a changeset.
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
    p4file = [f.rstrip() for f in p4file.splitlines()]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

    count = 0

    # Renamed from "file"/"list" to avoid shadowing Python builtins.
    for p4entry in p4file:
        fields = p4entry.split()

        if fields[2] == "delete":
            continue

        dest = fields[0][len(path) + 1:]
        where = dest.find("#")

        subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]), shell=True)
        count = count + 1

    if count == 0:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup=[ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, loc, ud, d):
    """
    Fetch urls: enumerate the files in the Perforce depot at the resolved
    label/changeset, print each file into a temporary directory, tar the
    result into ud.localpath, then remove the temporary directory.

    loc is the URL string used for logging and error reporting.
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    # Strip the recursive wildcard to get the depot path prefix.
    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    module = parm.get('module', os.path.basename(path))

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)
    if pswd:
        p4opt += " -P %s" % (pswd)
    if host:
        p4opt += " -p %s" % (host)
    p4cmd = data.getVar('FETCHCOMMAND', localdata, True)

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # Pin the depot spec to a label if given, otherwise to a changeset.
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    # Read the output into a list: os.popen() returns an always-truthy
    # pipe object, so the previous "if not p4file" check could never
    # trigger; testing the materialized line list actually detects an
    # empty file listing.
    p4file = [line.rstrip() for line in os.popen("%s%s files %s" % (p4cmd, p4opt, depot))]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)

    count = 0

    # Renamed from "file"/"list" to avoid shadowing Python builtins.
    for p4entry in p4file:
        fields = p4entry.split()

        if fields[2] == "delete":
            continue

        dest = fields[0][len(path) + 1:]
        where = dest.find("#")

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]))
        count = count + 1

    if count == 0:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError("Fetch: No files gathered from the P4 fetch", loc)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup=[ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, loc, ud, d):
    """
    Fetch urls: enumerate the files in the Perforce depot at the resolved
    label/changeset, print each file into a temporary directory, tar the
    result into ud.localpath, then remove the temporary directory.

    loc is the URL string used for logging and error reporting.
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    # Strip the recursive wildcard to get the depot path prefix.
    if depot.find("/...") != -1:
        path = depot[: depot.find("/...")]
    else:
        path = depot

    module = parm.get("module", os.path.basename(path))

    localdata = data.createCopy(d)
    data.setVar("OVERRIDES", "p4:%s" % data.getVar("OVERRIDES", localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)
    if pswd:
        p4opt += " -P %s" % (pswd)
    if host:
        p4opt += " -p %s" % (host)
    p4cmd = data.getVar("FETCHCOMMAND", localdata, True)

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand("${WORKDIR}", localdata))
    data.setVar("TMPBASE", data.expand("${WORKDIR}/oep4.XXXXXX", localdata), localdata)
    tmppipe = os.popen(data.getVar("MKTEMPDIRCMD", localdata, True) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # Pin the depot spec to a label if given, otherwise to a changeset.
    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    # Read the output into a list: os.popen() returns an always-truthy
    # pipe object, so the previous "if not p4file" check could never
    # trigger; testing the materialized line list actually detects an
    # empty file listing.
    p4file = [line.rstrip() for line in os.popen("%s%s files %s" % (p4cmd, p4opt, depot))]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)

    count = 0

    # Renamed from "file"/"list" to avoid shadowing Python builtins.
    for p4entry in p4file:
        fields = p4entry.split()

        if fields[2] == "delete":
            continue

        dest = fields[0][len(path) + 1 :]
        where = dest.find("#")

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], fields[0]))
        count = count + 1

    if count == 0:
        # logger.error() with no arguments raises TypeError and would mask
        # the intended FetchError; log a real message instead.
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError("Fetch: No files gathered from the P4 fetch", loc)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup=[ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)