def getcset(d, depot, host, user, pswd, parm):
    p4opt = ""
    if "cset" in parm:
        return parm["cset"]
    if user:
        p4opt += " -u %s" % (user)
    if pswd:
        p4opt += " -P %s" % (pswd)
    if host:
        p4opt += " -p %s" % (host)

    p4date = data.getVar("P4DATE", d, 1)
    if "revision" in parm:
        depot += "#%s" % (parm["revision"])
    elif "label" in parm:
        depot += "@%s" % (parm["label"])
    elif p4date:
        depot += "@%s" % (p4date)

    p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
    logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
    p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
    cset = p4file.readline().strip()
    logger.debug(1, "READ %s", cset)
    if not cset:
        return -1

    return cset.split(' ')[1]
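# Illustrative sketch only (not part of the fetcher above): "p4 changes -m 1 <depot>"
# prints a single summary line, so the changeset number getcset() returns is the
# second whitespace-separated field. The line below is a made-up example.
example_line = "Change 123456 on 2011/01/02 by user@client 'Fix the widget build'"
assert example_line.split(' ')[1] == "123456"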
def fetch_uri(uri, ud, d):
    # 'checkonly' is presumably provided by the enclosing scope (e.g. a keyword
    # argument of the surrounding go() method); it is not defined locally.
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it... trying again...
        fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    logger.info("fetch " + uri)
    logger.debug(2, "executing " + fetchcmd)
    runfetchcmd(fetchcmd, d)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
        return False

    return True
def _sortable_buildindex_disabled(self, url, ud, d, rev):
    """
    Return a suitable buildindex for the revision specified. This is done by counting
    revisions using "git rev-list" which may or may not work in different circumstances.
    """

    cwd = os.getcwd()

    # Check if we have the rev already
    if not os.path.exists(ud.clonedir):
        print("no repo")
        self.go(None, ud, d)
        if not os.path.exists(ud.clonedir):
            logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
            return None

    os.chdir(ud.clonedir)
    if not self._contains_ref(rev, d):
        self.go(None, ud, d)

    output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
    os.chdir(cwd)

    buildindex = "%s" % output.split()[0]
    logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
    return buildindex
def _latest_revision(self, url, ud, d):
    """
    Return the latest upstream revision number
    """
    logger.debug(2, "BZR fetcher hitting network for %s", url)

    output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)

    return output.strip()
def _latest_revision(self, url, ud, d):
    """
    Return the latest upstream revision number
    """
    logger.debug(2, "SVN fetcher hitting network for %s", url)

    output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)

    revision = None
    for line in output.splitlines():
        if "Last Changed Rev" in line:
            revision = line.split(":")[1].strip()

    return revision
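# Illustrative sketch only (not part of the fetcher above): with LANG=C/LC_ALL=C,
# "svn info" emits a "Last Changed Rev:" line, which the loop above reduces to a
# plain revision string. The output below is a made-up example.
example_output = """Path: trunk
URL: http://svn.example.org/repo/trunk
Last Changed Rev: 12345
Last Changed Date: 2011-01-02 15:30:00 +0000 (Sun, 02 Jan 2011)"""

revision = None
for line in example_output.splitlines():
    if "Last Changed Rev" in line:
        revision = line.split(":")[1].strip()
assert revision == "12345"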
def go(self, loc, ud, d):
    """Fetch urls"""

    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    bb.utils.prunedir(tmpfile)
def go(self, loc, ud, d):
    """Fetch urls"""

    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    bb.utils.prunedir(tmpfile)
def go(self, loc, ud, d):
    """Fetch url"""

    if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
        bzrcmd = self._buildbzrcommand(ud, d, "update")
        logger.debug(1, "BZR Update %s", loc)
        os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
        runfetchcmd(bzrcmd, d)
    else:
        bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
        bzrcmd = self._buildbzrcommand(ud, d, "fetch")
        logger.debug(1, "BZR Checkout %s", loc)
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", bzrcmd)
        runfetchcmd(bzrcmd, d)

    os.chdir(ud.pkgdir)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

    # tar them up to a defined filename
    try:
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
        svnupdatecmd = self._buildsvncommand(ud, d, "update")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", svnupdatecmd)
        runfetchcmd(svnupdatecmd, d)
    else:
        svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", svnfetchcmd)
        runfetchcmd(svnfetchcmd, d)

    os.chdir(ud.pkgdir)
    # tar them up to a defined filename
    try:
        runfetchcmd("tar --exclude '.svn' -czf %s %s" % (ud.localpath, ud.module), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
        bzrcmd = self._buildbzrcommand(ud, d, "update")
        logger.debug(1, "BZR Update %s", loc)
        os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
        runfetchcmd(bzrcmd, d)
    else:
        os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
        bzrcmd = self._buildbzrcommand(ud, d, "fetch")
        logger.debug(1, "BZR Checkout %s", loc)
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", bzrcmd)
        runfetchcmd(bzrcmd, d)

    os.chdir(ud.pkgdir)
    # tar them up to a defined filename
    try:
        runfetchcmd("tar --exclude '.bzr' --exclude '.bzrtags' -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        updatecmd = self._buildhgcommand(ud, d, "pull")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
    else:
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't check out the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.hg' --exclude '.hgtags'"

    os.chdir(ud.pkgdir)
    try:
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        updatecmd = self._buildhgcommand(ud, d, "pull")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
    else:
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't check out the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.hg' --exclude '.hgtags'"

    os.chdir(ud.pkgdir)
    try:
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
        bzrcmd = self._buildbzrcommand(ud, d, "update")
        logger.debug(1, "BZR Update %s", loc)
        os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
        runfetchcmd(bzrcmd, d)
    else:
        bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
        bzrcmd = self._buildbzrcommand(ud, d, "fetch")
        logger.debug(1, "BZR Checkout %s", loc)
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", bzrcmd)
        runfetchcmd(bzrcmd, d)

    os.chdir(ud.pkgdir)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

    # tar them up to a defined filename
    try:
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
        svnupdatecmd = self._buildsvncommand(ud, d, "update")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", svnupdatecmd)
        runfetchcmd(svnupdatecmd, d)
    else:
        svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", svnfetchcmd)
        runfetchcmd(svnfetchcmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.svn'"

    os.chdir(ud.pkgdir)
    # tar them up to a defined filename
    try:
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
    except:
        t, v, tb = sys.exc_info()
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise t, v, tb
def go(self, loc, ud, d):

    method = ud.parm.get('method', 'pserver')
    localdir = ud.parm.get('localdir', ud.module)
    cvs_port = ud.parm.get('port', '')

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []

    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    logger.debug(2, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        logger.info("Update " + loc)
        # update sources there
        os.chdir(moddir)
        myret = os.system(cvsupdatecmd)
    else:
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        logger.debug(1, "Running %s", cvscmd)
        myret = os.system(cvscmd)

    if myret != 0 or not os.access(moddir, os.R_OK):
        try:
            os.rmdir(moddir)
        except OSError:
            pass
        raise FetchError(ud.module)

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        os.chdir(pkgdir)
        myret = os.system("tar --exclude 'CVS' -czf %s %s" % (ud.localpath, localdir))
    else:
        os.chdir(moddir)
        os.chdir('..')
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))

    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
def go(self, loc, ud, d):

    method = ud.parm.get('method', 'pserver')
    localdir = ud.parm.get('localdir', ud.module)
    cvs_port = ud.parm.get('port', '')

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []

    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    logger.debug(2, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        logger.info("Update " + loc)
        # update sources there
        os.chdir(moddir)
        myret = os.system(cvsupdatecmd)
    else:
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        logger.debug(1, "Running %s", cvscmd)
        myret = os.system(cvscmd)

    if myret != 0 or not os.access(moddir, os.R_OK):
        try:
            os.rmdir(moddir)
        except OSError:
            pass
        raise FetchError(ud.module)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude 'CVS'"

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        os.chdir(pkgdir)
        myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
    else:
        os.chdir(moddir)
        os.chdir('..')
        myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))

    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
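# Illustrative sketch only (not part of the fetchers above): how the CVS fetchers
# turn a hypothetical 12-character date of the form YYYYMMDDHHMM into the -D
# checkout option, while any other date string is passed through unchanged.
date = "201101021530"
if len(date) == 12:
    option = "-D \"%s %s:%s UTC\"" % (date[0:8], date[8:10], date[10:12])
else:
    option = "-D \"%s UTC\"" % date
assert option == '-D "20110102 15:30 UTC"'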
def go(self, loc, ud, d):
    """
    Fetch urls
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    module = parm.get('module', os.path.basename(path))

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(module)

    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

    if not p4file:
        logger.error("Fetch: unable to get the P4 files from %s", depot)
        raise FetchError(module)

    count = 0

    for file in p4file:
        list = file.split()

        if list[2] == "delete":
            continue

        dest = list[0][len(path)+1:]
        where = dest.find("#")

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
        count = count + 1

    if count == 0:
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError(module)

    myret = os.system("tar -czf %s %s" % (ud.localpath, module))
    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(module)
    # cleanup
    bb.utils.prunedir(tmpfile)
def go(self, loc, ud, d):
    """
    Fetch urls
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    module = parm.get('module', os.path.basename(path))

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(module)

    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

    if not p4file:
        logger.error("Fetch: unable to get the P4 files from %s", depot)
        raise FetchError(module)

    count = 0

    for file in p4file:
        list = file.split()

        if list[2] == "delete":
            continue

        dest = list[0][len(path)+1:]
        where = dest.find("#")

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
        count = count + 1

    if count == 0:
        logger.error("Fetch: No files gathered from the P4 fetch")
        raise FetchError(module)

    myret = os.system("tar -czf %s %s" % (ud.localpath, module))
    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(module)
    # cleanup
    bb.utils.prunedir(tmpfile)
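# Illustrative sketch only (not part of the fetchers above): each line of a
# hypothetical "p4 files //depot/project/...@123456" listing looks roughly like
#   //depot/project/src/main.c#7 - edit change 123456 (text)
# so after split(), list[0] is the depot file (whose "#<rev>" suffix the loop trims
# via dest.find("#")) and list[2] is the action, skipped when it is "delete".
example_line = "//depot/project/src/main.c#7 - edit change 123456 (text)"
fields = example_line.split()
assert fields[2] == "edit"
assert fields[0].split("#")[0] == "//depot/project/src/main.c"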