def localpath(url, d):
    """Return the local cache filename for an svn:// url.

    Honours an explicit 'localpath' url parameter; otherwise builds a
    tarball name from module, host, path and either revision or SRCDATE.
    """
    type, host, path, user, pswd, parm = bb.decodeurl(data.expand(url, d))
    # A user-supplied localpath overrides everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    module = parm["module"]
    revision = parm.get("rev", "")
    date = Fetch.getSRCDate(d)
    if "srcdate" in parm:
        date = parm["srcdate"]
    # A pinned revision makes the date component redundant.
    if revision:
        date = ""
    filename = "%s_%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, path.replace("/", "."), revision, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(filename, d))
def download(self, loc, ud, d):
    """Fetch urls.

    Checks out the svk module into a fresh temporary directory, tars it
    up into ud.localpath, then removes the temporary tree.
    """
    svkroot = ud.host + ud.path
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
    # An explicit revision overrides the date-based checkout.
    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        # Fix: logger.error() was previously called with no message, which
        # itself raises TypeError and masked the real failure.
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup = [tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'"""
    # Fetch the function body from the datastore; nothing to do if unset.
    body = data.getVar(func, d)
    if not body:
        return
    # Directories to create before running come from the 'dirs' varflag
    # unless the caller supplied them explicitly.
    if not dirs:
        dirs = (data.getVarFlag(func, 'dirs', d) or "").split()
    for adir in dirs:
        adir = data.expand(adir, d)
        mkdirhier(adir)
    # The function runs inside the last listed dir, or ${B} by default.
    # dirs[-1] is still unexpanded here; the expand below handles both cases.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)
    adir = data.expand(adir, d)
    # Remember where we were so we can restore the cwd afterwards; the
    # current directory may already have been deleted, hence the fallback.
    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.expand('${TOPDIR}', d)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)
    # Dispatch on the 'python' varflag: python code vs shell snippet.
    if data.getVarFlag(func, "python", d):
        exec_func_python(func, d)
    else:
        exec_func_shell(func, d)
    os.chdir(prevdir)
def download(self, ud, d):
    """Fetch urls.

    Checks out the svk module into a fresh temporary directory, tars it
    up into ud.localpath, then removes the temporary tree.
    """
    svkroot = ud.host + ud.path
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
    # An explicit revision overrides the date-based checkout.
    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        # Fix: logger.error() was previously called with no message, which
        # itself raises TypeError and masked the real failure.
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup = [tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def go(self, d, urls = []):
    """Fetch urls"""
    # NOTE(review): mutable default argument; callers appear to always pass
    # urls or rely on self.urls, but this is fragile — confirm before reuse.
    if not urls:
        urls = self.urls

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))

        tag = gettag(parm)
        proto = getprotocol(parm)

        # Flatten host+path into a single name usable as a directory/file stem.
        gitsrcname = '%s%s' % (host, path.replace('/', '.'))

        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)

        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        coname = '%s' % (tag)
        codir = os.path.join(repodir, coname)

        cofile = self.localpath(loc, d)

        # tag=="master" must always update
        if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % cofile)
            continue

        # Either unpack a mirrored tarball of the repository or clone fresh.
        if not os.path.exists(repodir):
            if Fetch.try_mirror(d, repofilename):
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                rungitcmd("tar -xzf %s" % (repofile), d)
            else:
                rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir), d)

        # Update the repository and compact its object store.
        os.chdir(repodir)
        rungitcmd("git pull %s://%s%s" % (proto, host, path), d)
        rungitcmd("git pull --tags %s://%s%s" % (proto, host, path), d)
        rungitcmd("git prune-packed", d)
        rungitcmd("git pack-redundant --all | xargs -r rm", d)
        # Remove all but the .git directory
        rungitcmd("rm * -Rf", d)
        # old method of downloading tags
        #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)

        # Stash a mirror tarball of the bare repository into DL_DIR.
        os.chdir(repodir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*")), d)

        # Re-create the checkout directory for this tag from scratch.
        if os.path.exists(codir):
            prunedir(codir)

        bb.mkdirhier(codir)
        os.chdir(repodir)
        rungitcmd("git read-tree %s" % (tag), d)
        rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)

        # Finally tar the checkout into the per-url cache file.
        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*")), d)
def localpath(self, url, ud, d):
    """Set up osc checkout paths on ud and return the local tarball path."""
    if "module" not in ud.parm:
        raise MissingParameterError("osc method needs a 'module' parameter.")
    ud.module = ud.parm["module"]

    # Remove a single leading slash so os.path.join behaves.
    relpath = ud.path
    if relpath.startswith('/'):
        relpath = relpath[1:]
    ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
    ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        pv = data.getVar("PV", d, 0)
        rev = Fetch.srcrev_internal_helper(ud, d)
        # The helper may return True (meaning "autorev"), a revision
        # string, or a falsy value; only a real string is usable here.
        if rev and rev != True:
            ud.revision = rev
        else:
            ud.revision = ""

    tarball = '%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision)
    ud.localfile = data.expand(tarball, d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Compute osc checkout directories and the cached tarball location."""
    if "module" not in ud.parm:
        raise MissingParameterError("osc method needs a 'module' parameter.")
    ud.module = ud.parm["module"]

    # os.path.join can't cope with a leading slash, so drop one if present.
    relpath = ud.path[1:] if ud.path.startswith('/') else ud.path
    ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
    ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        pv = data.getVar("PV", d, 0)
        rev = Fetch.srcrev_internal_helper(ud, d)
        # Accept only a concrete revision string (not the True sentinel).
        ud.revision = rev if (rev and rev != True) else ""

    ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def generate_config(self, ud, d):
    """
    Generate a .oscrc to be used for this run.

    Rewrites the file from scratch each time and returns its path.
    """
    config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
    # Always start from a clean file.
    if os.path.exists(config_path):
        os.remove(config_path)

    f = open(config_path, 'w')
    f.writelines([
        "[general]\n",
        "apisrv = %s\n" % ud.host,
        "scheme = http\n",
        "su-wrapper = su -c\n",
        "build-root = %s\n" % data.expand('${WORKDIR}', d),
        "urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n",
        "extra-pkgs = gzip\n",
        "\n",
        # Per-host section carries the credentials from the url parameters.
        "[%s]\n" % ud.host,
        "user = %s\n" % ud.parm["user"],
        "pass = %s\n" % ud.parm["pswd"],
    ])
    f.close()

    return config_path
def localpath(self, url, ud, d):
    """Set up mercurial checkout paths and return the local tarball path."""
    if "module" not in ud.parm:
        raise MissingParameterError("hg method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    # Create paths to mercurial checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']
    else:
        tag = Fetch.srcrev_internal_helper(ud, d)
        # A concrete tag string pins the revision; the True sentinel or a
        # falsy result both mean "ask the upstream for its tip".
        if tag and tag is not True:
            ud.revision = tag
        else:
            ud.revision = self.latest_revision(url, ud, d)

    tarball = '%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision)
    ud.localfile = data.expand(tarball, d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(url, d):
    """Return the local cache filename for a cvs:// url."""
    type, host, path, user, pswd, parm = bb.decodeurl(data.expand(url, d))
    # A user-supplied localpath overrides the generated name entirely.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("cvs method needs a 'module' parameter")
    module = parm["module"]
    tag = parm.get("tag", "")
    if "date" in parm:
        date = parm["date"]
    elif tag:
        # A tag pins the checkout, so no date component is needed.
        date = ""
    else:
        date = Fetch.getSRCDate(d)
    filename = data.expand("%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date), d)
    return os.path.join(data.getVar("DL_DIR", d, 1), filename)
def urldata_init(self, ud, d):
    """
    init svn specific variable within url data
    """
    if "module" not in ud.parm:
        raise MissingParameterError('module', ud.url)

    ud.basecmd = d.getVar('FETCHCMD_svn', True)
    ud.module = ud.parm["module"]
    # The checkout directory name defaults to the module name.
    ud.path_spec = ud.parm.get("path_spec", ud.module)

    # Create paths to svn checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    ud.setup_revisons(d)

    # An explicit rev parameter overrides whatever setup_revisons chose.
    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']

    tarball = '%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision)
    ud.localfile = data.expand(tarball, d)
def urldata_init(self, ud, d):
    """
    init svn specific variable within url data
    """
    if "module" not in ud.parm:
        raise MissingParameterError('module', ud.url)

    ud.basecmd = d.getVar('FETCHCMD_svn', True)
    ud.module = ud.parm["module"]
    if "path_spec" in ud.parm:
        ud.path_spec = ud.parm["path_spec"]
    else:
        # Default the checkout directory name to the module name.
        ud.path_spec = ud.module

    # Create paths to svn checkouts
    ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, self._strip_leading_slashes(ud.path))
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    ud.setup_revisons(d)

    # An explicit rev parameter takes precedence.
    if 'rev' in ud.parm:
        ud.revision = ud.parm['rev']

    ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
def exec_func(func, d, dirs=None):
    """Execute an BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        # Distinguish "defined but empty" (silently skipped) from "missing".
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)
    # Directories listed in 'cleandirs' are wiped before the function runs.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    # Caller-supplied dirs win; otherwise use the 'dirs' varflag.
    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    # The function executes in the last listed dir, or ${B} by default.
    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

    lockflag = flags.get('lockfiles')
    if lockflag:
        lockfiles = [data.expand(f, d) for f in lockflag.split()]
    else:
        lockfiles = None

    tempdir = data.getVar('T', d, 1)

    # or func allows items to be executed outside of the normal
    # task set, such as buildhistory
    task = data.getVar('BB_RUNTASK', d, 1) or func
    if task == func:
        taskfunc = task
    else:
        taskfunc = "%s.%s" % (task, func)

    # BB_RUNFMT controls the name of the logged run file under ${T}.
    runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
    runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
    runfile = os.path.join(tempdir, runfn)
    bb.utils.mkdirhier(os.path.dirname(runfile))

    # Hold all declared lockfiles for the duration of the function.
    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)
def go(self, loc, ud, d):
    """Fetch urls"""
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        return

    svkroot = ud.host + ud.path

    # Fix: 'date' was an undefined name here (the old comment even noted
    # pyflakes complained) — the per-url date lives on ud.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        # Fix: the revision branch had only two %s placeholders for three
        # arguments, raising TypeError whenever a revision was set.
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        # Best-effort cleanup of the (empty) temp dir before failing.
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # Remove a partially-written tarball before failing.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def exec_func(func, d, dirs=None):
    """Execute an BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        return

    # Normalise the varflags we consult so missing flags read as None.
    flags = data.getVarFlags(func, d)
    for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
        if not item in flags:
            flags[item] = None

    # NOTE(review): ispython is computed but flags['python'] is re-read for
    # the dispatch below — redundant but harmless.
    ispython = flags['python']

    # Wipe any 'cleandirs' before running.
    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    # Caller-supplied dirs win; otherwise use the 'dirs' varflag.
    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    # Run inside the last listed dir, or ${B} by default.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    # Remember the cwd (which may itself have been deleted) for restoration.
    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.getVar('TOPDIR', d, True)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    # Acquire every declared lockfile before executing.
    locks = []
    lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
    for lock in lockfiles:
        locks.append(bb.utils.lockfile(lock))

    if flags['python']:
        exec_func_python(func, d)
    else:
        exec_func_shell(func, d, flags)

    for lock in locks:
        bb.utils.unlockfile(lock)

    if os.path.exists(prevdir):
        os.chdir(prevdir)
def localfile(url, d):
    """Return the filename to cache the checkout in"""
    type, host, path, user, pswd, parm = bb.decodeurl(data.expand(url, d))

    # A user-supplied localpath wins over the generated name.
    if "localpath" in parm:
        return parm["localpath"]

    return data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), gettag(parm)), d)
def clean(self, ud, d):
    """ Clean CVS Files and tarballs """
    # Apply the cvs override so CVSDIR expands with fetcher-specific values.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Remove both the checkout tree and the cached tarball.
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), data.expand('${PN}', d))
    bb.utils.remove(pkgdir, True)
    bb.utils.remove(ud.localpath)
def urldata_init(self, ud, d):
    """
    init bzr specific variable within url data
    """
    # Create paths to bzr checkouts
    ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, self._strip_leading_slashes(ud.path))

    # Without an explicit revision, track the upstream tip.
    if not ud.revision:
        ud.revision = self.latest_revision(ud.url, ud, d)

    ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
def localpath(url, d):
    """Return the DL_DIR path caching this git checkout."""
    type, host, path, user, pswd, parm = bb.decodeurl(data.expand(url, d))

    # A user-supplied localpath wins over the generated name.
    if "localpath" in parm:
        return parm["localpath"]

    localname = data.expand('git_%s%s_%s.tar.gz' % (host, path.replace('/', '.'), gettag(parm)), d)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(localname, d))
def exec_func(func, d, dirs=None, logfile=NULL):
    """Execute an BB 'function'"""
    # NOTE(review): the default 'NULL' is a module-level sentinel defined
    # elsewhere in this file — confirm it is a writable null stream.
    body = data.getVar(func, d)
    if not body:
        # Distinguish "defined but empty" (skipped) from "missing".
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)
    # Wipe any 'cleandirs' before running.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    # Caller-supplied dirs win; otherwise use the 'dirs' varflag.
    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    # Run inside the last listed dir, or ${B} if it exists; a missing ${B}
    # means "no particular cwd" (None).
    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)
        if not os.path.exists(adir):
            adir = None

    ispython = flags.get('python')
    fakeroot = flags.get('fakeroot')

    lockflag = flags.get('lockfiles')
    if lockflag:
        lockfiles = [data.expand(f, d) for f in lockflag.split()]
    else:
        lockfiles = None

    # The executed script is logged to ${T}/run.<func>.<pid>.
    tempdir = data.getVar('T', d, 1)
    runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))

    # Hold all declared lockfiles for the duration of the function.
    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, logfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, logfile, cwd=adir, fakeroot=fakeroot)
def urldata_init(self, ud, d):
    """
    init bzr specific variable within url data
    """
    # Create paths to bzr checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

    ud.setup_revisons(d)

    # Fall back to the upstream tip when no revision was pinned.
    if not ud.revision:
        ud.revision = self.latest_revision(ud.url, ud, d)

    tarball = 'bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision)
    ud.localfile = data.expand(tarball, d)
def go(self, loc, ud, d):
    """Fetch urls"""
    svkroot = ud.host + ud.path

    # Default to a date-based checkout; an explicit revision overrides it.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    # MKTEMPDIRCMD prints the created directory; "false" forces the empty
    # read (and thus the error path) if the command is unset.
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        # Best-effort removal of the (empty) temp dir before failing.
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # Remove a partially-written tarball before failing.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    bb.utils.prunedir(tmpfile)
def go(self, loc, ud, d):
    """Fetch urls"""
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        return

    svkroot = ud.host + ud.path

    # Fix: 'date' was an undefined name here (the old comment even noted
    # pyflakes complained) — the per-url date lives on ud.
    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        # Fix: the revision branch had only two %s placeholders for three
        # arguments, raising TypeError whenever a revision was set.
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
    bb.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
        raise FetchError(ud.module)

    # check out sources there
    os.chdir(tmpfile)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
    myret = os.system(svkcmd)
    if myret != 0:
        # Best-effort cleanup of the (empty) temp dir before failing.
        try:
            os.rmdir(tmpfile)
        except OSError:
            pass
        raise FetchError(ud.module)

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
    if myret != 0:
        # Remove a partially-written tarball before failing.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
    # cleanup
    os.system('rm -rf %s' % tmpfile)
def urldata_init(self, ud, d):
    """Initialise wget url data: basename, local filename and base command."""
    # Reject the common mistake of using http with protocol=git here.
    if ud.parm.get('protocol') == 'git':
        raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

    ud.basename = ud.parm.get('downloadfilename', os.path.basename(ud.path))

    ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d)
    if not ud.localfile:
        # A path ending in '/' yields an empty basename; derive a name
        # from host+path instead.
        ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d)

    self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """
    import sys

    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)

    stdout_handle = os.popen(pathcmd, "r")
    output = ""

    while 1:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            # sys.stdout.write instead of the py2-only 'print line,' form.
            sys.stdout.write(line)
        output += line

    status = stdout_handle.close() or 0
    # close() returns the status in os.wait() encoding: the LOW byte holds
    # the terminating signal and the HIGH byte the exit code.  These were
    # previously swapped, so failures were misclassified/misreported.
    signal = status & 0xff
    exitstatus = status >> 8

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (pathcmd, signal, output))
    elif exitstatus != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (pathcmd, exitstatus, output))

    return output
def localpath(self, url, ud, d):
    """Build the cvs checkout's cached tarball name from the url parameters."""
    if "module" not in ud.parm:
        raise MissingParameterError("cvs method needs a 'module' parameter")
    ud.module = ud.parm["module"]

    ud.tag = ud.parm.get('tag', "")

    # Override the default date in certain cases
    if 'date' in ud.parm:
        ud.date = ud.parm['date']
    elif ud.tag:
        # A tag pins the checkout; drop the date component.
        ud.date = ""

    # Encode the norecurse/fullpath options into the filename so different
    # option combinations do not share a cache entry.
    norecurse = '_norecurse' if 'norecurse' in ud.parm else ''
    fullpath = '_fullpath' if 'fullpath' in ud.parm else ''

    ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def getTask(name):
    # Record the inter-task dependency declared via varflag 'name' (e.g.
    # 'deptask'/'rdeptask') for the current task.
    # NOTE(review): 'task', 'd' and 'task_deps' come from the enclosing
    # scope — this is a nested helper, not a standalone function.
    deptask = data.getVarFlag(task, name, d)
    if deptask:
        deptask = data.expand(deptask, d)
        if not name in task_deps:
            task_deps[name] = {}
        task_deps[name][task] = deptask
def localpath(self, url, ud, d):
    """
    We don"t care about the git rev of the manifests repository, but
    we do care about the manifest to use.  The default is "default".
    We also care about the branch or tag to be used.  The default is
    "master".
    """
    ud.proto = ud.parm.get("protocol", "git")
    ud.branch = ud.parm.get("branch", "master")

    # Normalise the manifest name so it always carries the .xml suffix.
    manifest = ud.parm.get("manifest", "default.xml")
    if not manifest.endswith(".xml"):
        manifest += ".xml"
    ud.manifest = manifest

    ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Derive the cached filename for this url, ignoring url parameters."""
    # Re-encode the url without its parameter dict so the basename is clean.
    url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
    ud.basename = os.path.basename(ud.path)
    filename = os.path.basename(url)
    ud.localfile = data.expand(filename, d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def __init__(self, url, d):
    """Decode *url* and initialise the per-url fetch state."""
    self.localfile = ""
    (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
    self.date = Fetch.getSRCDate(self, d)
    self.url = url

    # Credentials may come from the url itself or from url parameters.
    if not self.user and "user" in self.parm:
        self.user = self.parm["user"]
    if not self.pswd and "pswd" in self.parm:
        self.pswd = self.parm["pswd"]
    self.setup = False

    # A 'name' parameter namespaces the checksum varflags, allowing
    # multiple SRC_URI entries per recipe.
    if "name" in self.parm:
        prefix = "%s." % self.parm["name"]
    else:
        prefix = ""
    self.md5_name = prefix + "md5sum"
    self.sha256_name = prefix + "sha256sum"
    self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
    self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)

    # Bind the first fetch method that claims support for this url.
    for m in methods:
        if m.supports(url, self, d):
            self.method = m
            return
    raise NoMethodError("Missing implementation for url %s" % url)
def localpath(self, url, ud, d):
    """Resolve the git tag/revision for this url and return the tarball path."""
    ud.proto = "rsync"
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    ud.branch = ud.parm.get("branch", "master")

    tag = Fetch.srcrev_internal_helper(ud, d)
    if tag is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif tag:
        ud.tag = tag
    else:
        # Fix: ud.tag was never initialised when the helper returned a
        # falsy value, so the 'not ud.tag' test below could raise
        # AttributeError.  Matches the sibling implementation.
        ud.tag = ""

    if not ud.tag:
        ud.tag = self.latest_revision(url, ud, d)

    # tag "master" must always be resolved to a concrete revision.
    if ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)

    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def localpath(self, url, ud, d):
    """Resolve protocol and tag for this git url, then return the tarball path."""
    proto = ud.parm.get('protocol')
    if proto:
        ud.proto = proto
    elif not ud.host:
        # No host part means a local repository.
        ud.proto = 'file'
    else:
        ud.proto = "rsync"

    ud.branch = ud.parm.get("branch", "master")

    tag = Fetch.srcrev_internal_helper(ud, d)
    # The helper returns True (autorev), a tag string, or a falsy value.
    if tag is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif tag:
        ud.tag = tag
    else:
        ud.tag = ""

    # An empty or symbolic "master" tag must be resolved to a revision.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d)
    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def _buildbzrcommand(self, ud, d, command):
    """
    Build up an bzr commandline based on ud
    command is "fetch", "update", "revno"
    """
    basecmd = data.expand('${FETCHCMD_bzr}', d)
    proto = ud.parm.get('proto', 'http')
    bzrroot = ud.host + ud.path

    options = []

    # 'revno' ignores the revision pin entirely.
    if command == "revno":
        return "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)

    if ud.revision:
        options.append("-r %s" % ud.revision)
    if command == "fetch":
        return "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
    if command == "update":
        return "%s pull %s --overwrite" % (basecmd, " ".join(options))
    raise FetchError("Invalid bzr command %s" % command, ud.url)
def stamp_is_current(task, d, checkdeps = 1):
    """Check status of a given task's stamp. returns 0 if it is not current and needs updating."""
    # Lazily create and cache the task dependency graph in the datastore.
    task_graph = data.getVar('_task_graph', d)
    if not task_graph:
        task_graph = bb.digraph()
        data.setVar('_task_graph', task_graph, d)
    stamp = data.getVar('STAMP', d)
    if not stamp:
        return 0
    # Stamp files are named <STAMP>.<task>.
    stampfile = "%s.%s" % (data.expand(stamp, d), task)
    if not os.access(stampfile, os.F_OK):
        return 0

    if checkdeps == 0:
        return 1

    import stat
    tasktime = os.stat(stampfile)[stat.ST_MTIME]

    _deps = []
    def checkStamp(graph, task):
        # check for existance
        # 'nostamp' tasks are always considered current.
        if data.getVarFlag(task, 'nostamp', d):
            return 1

        # Recurse without dependency checking to test the dep's own stamp.
        if not stamp_is_current(task, d, 0):
            return 0

        # A dependency stamp newer than ours invalidates our stamp.
        depfile = "%s.%s" % (data.expand(stamp, d), task)
        deptime = os.stat(depfile)[stat.ST_MTIME]
        if deptime > tasktime:
            return 0
        return 1

    # Walk all dependencies; current only if every dep's stamp checks out.
    return task_graph.walkdown(task, checkStamp)
def _buildbzrcommand(self, ud, d, command):
    """
    Build up an bzr commandline based on ud
    command is "fetch", "update", "revno"
    """
    basecmd = data.expand('${FETCHCMD_bzr}', d)
    proto = ud.parm.get('protocol', 'http')
    bzrroot = ud.host + ud.path

    options = []

    # 'revno' ignores the revision pin entirely.
    if command == "revno":
        return "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)

    if ud.revision:
        options.append("-r %s" % ud.revision)
    if command == "fetch":
        return "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
    if command == "update":
        return "%s pull %s --overwrite" % (basecmd, " ".join(options))
    raise FetchError("Invalid bzr command %s" % command, ud.url)
def go(self, loc, ud, d):
    """
    Fetch url
    """
    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
        oscupdatecmd = self._buildosccommand(ud, d, "update")
        logger.info("Update "+ loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", oscupdatecmd)
        runfetchcmd(oscupdatecmd, d)
    else:
        oscfetchcmd = self._buildosccommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", oscfetchcmd)
        runfetchcmd(oscfetchcmd, d)

    os.chdir(os.path.join(ud.pkgdir + ud.path))
    # tar them up to a defined filename
    try:
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
    except:
        # Remove a partially-written tarball, then re-raise the original
        # exception.  A bare 'raise' preserves the traceback and replaces
        # the Python-2-only 'raise t, v, tb' form.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise
def _buildosccommand(self, ud, d, command):
    """
    Build up an ocs commandline based on ud
    command is "fetch", "update", "info"
    """
    basecmd = data.expand('${FETCHCMD_osc}', d)

    proto = "ocs"
    if "proto" in ud.parm:
        proto = ud.parm["proto"]

    options = []

    config = "-c %s" % self.generate_config(ud, d)

    if ud.revision:
        options.append("-r %s" % ud.revision)

    coroot = ud.path
    if coroot.startswith('/'):
        # Remove leading slash as os.path.join can't cope
        coroot = coroot[1:]

    # Fix: the command was compared with 'is', an identity test whose
    # outcome depends on string interning; use '==' for a reliable match.
    if command == "fetch":
        osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
    elif command == "update":
        osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
    else:
        raise FetchError("Invalid osc command %s" % command)

    return osccmd
def go(self, loc, ud, d):
    """
    Fetch url
    """
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
        oscupdatecmd = self._buildosccommand(ud, d, "update")
        bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc)
        # update sources there
        os.chdir(ud.moddir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd)
        runfetchcmd(oscupdatecmd, d)
    else:
        oscfetchcmd = self._buildosccommand(ud, d, "fetch")
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
        runfetchcmd(oscfetchcmd, d)

    os.chdir(os.path.join(ud.pkgdir + ud.path))
    # tar them up to a defined filename
    try:
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
    except:
        # Remove a partially-written tarball, then re-raise the original
        # exception.  A bare 'raise' preserves the traceback and replaces
        # the Python-2-only 'raise t, v, tb' form.
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise
def _buildosccommand(self, ud, d, command):
    """
    Build up an ocs commandline based on ud
    command is "fetch", "update", "info"
    """
    basecmd = data.expand('${FETCHCMD_osc}', d)
    proto = ud.parm.get('protocol', 'ocs')

    # Every invocation carries the generated per-run config file.
    config = "-c %s" % self.generate_config(ud, d)

    options = []
    if ud.revision:
        options.append("-r %s" % ud.revision)

    coroot = self._strip_leading_slashes(ud.path)

    if command == "fetch":
        return "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
    if command == "update":
        return "%s %s up %s" % (basecmd, config, " ".join(options))
    raise FetchError("Invalid osc command %s" % command, ud.url)
def download(self, loc, ud, d):
    """
    Fetch url
    """
    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
    # An existing readable checkout gets updated in place; otherwise a
    # fresh checkout is made under ud.pkgdir.
    if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
        oscupdatecmd = self._buildosccommand(ud, d, "update")
        logger.info("Update "+ loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", oscupdatecmd)
        bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
        runfetchcmd(oscupdatecmd, d)
    else:
        oscfetchcmd = self._buildosccommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", oscfetchcmd)
        bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
        runfetchcmd(oscfetchcmd, d)
    # NOTE(review): string concatenation inside os.path.join — pkgdir+path
    # relies on ud.path starting with '/'; confirm against urldata_init.
    os.chdir(os.path.join(ud.pkgdir + ud.path))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
def download(self, ud, d):
    """Update or check out the osc module for *ud* and tar the result."""
    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    checkout = os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module)
    if os.access(checkout, os.R_OK):
        # Checkout already present -- update it in place.
        updatecmd = self._buildosccommand(ud, d, "update")
        logger.info("Update " + ud.url)
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        bb.fetch2.check_network_access(d, updatecmd, ud.url)
        runfetchcmd(updatecmd, d)
    else:
        # First fetch: make the package directory and check out into it.
        fetchcmd = self._buildosccommand(ud, d, "fetch")
        logger.info("Fetch " + ud.url)
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd, ud.url)
        runfetchcmd(fetchcmd, d)

    # NOTE(review): '+' looks deliberate -- ud.path is presumably
    # absolute, so a second join component would drop ud.pkgdir.
    os.chdir(os.path.join(ud.pkgdir + ud.path))

    # Pack the checkout into the expected local tarball.
    runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
                cleanup=[ud.localpath])
def _buildosccommand(self, ud, d, command):
    """Build an osc command line based on *ud*.

    command is "fetch" or "update"; any other value raises FetchError.

    Bug fix: the original compared `command is "fetch"` / `is "update"`,
    which tests object identity and only works by accident of CPython
    string interning; use '==' for value equality.  The unused local
    `proto` (read from ud.parm but never referenced) was also dropped.
    """
    basecmd = data.expand('${FETCHCMD_osc}', d)

    options = []

    config = "-c %s" % self.generate_config(ud, d)

    if ud.revision:
        options.append("-r %s" % ud.revision)

    coroot = ud.path
    if coroot.startswith('/'):
        # Remove leading slash as os.path.join can't cope
        coroot = coroot[1:]

    if command == "fetch":
        osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
    elif command == "update":
        osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
    else:
        raise FetchError("Invalid osc command %s" % command)

    return osccmd
def urldata_init(self, ud, d):
    """
    init git specific variable within url data
    so that the git method like latest_revision() can work
    """
    # Protocol: explicit 'protocol' parameter wins; a hostless URL is a
    # local repository; otherwise default to the native git protocol.
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif not ud.host:
        ud.proto = 'file'
    else:
        ud.proto = "git"

    if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
        raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)

    ud.nocheckout = ud.parm.get("nocheckout", "0") == "1"

    ud.rebaseable = ud.parm.get("rebaseable", "0") == "1"

    # One branch per name; the comma-separated lists must line up pairwise.
    branches = ud.parm.get("branch", "master").split(',')
    if len(branches) != len(ud.names):
        raise bb.fetch2.ParameterError(
            "The number of name and branch parameters is not balanced", ud.url)
    ud.branches = {}
    for name in ud.names:
        branch = branches[ud.names.index(name)]
        ud.branches[name] = branch

    ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

    # Rebaseable repos always get a mirror tarball (see note further down).
    ud.write_tarballs = (
        (data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable

    ud.setup_revisons(d)

    for name in ud.names:
        # Anything that is not a full 40-hex-digit object name is treated
        # as a branch/tag and resolved to a concrete revision.  (The
        # original comment said "sha256", but the 40-character hex check
        # below matches git's SHA-1 ids.)
        if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (
                False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
            ud.branches[name] = ud.revisions[name]
            ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    # for rebaseable git repo, it is necessary to keep mirror tar ball
    # per revision, so that even the revision disappears from the
    # upstream repo in the future, the mirror will remain intact and still
    # contains the revision
    if ud.rebaseable:
        for name in ud.names:
            gitsrcname = gitsrcname + '_' + ud.revisions[name]
    ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
    ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)

    ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    ud.localfile = ud.clonedir
def __init__(self, url, d):
    """Decode *url* and initialise all per-URL fetch state.

    Selects the first fetch method that supports the URL and lets it
    initialise method-specific fields via urldata_init().
    """
    # localpath is the location of a downloaded result. If not set, the file is local.
    self.donestamp = None
    self.localfile = ""
    self.localpath = None
    self.lockfile = None
    self.mirrortarball = None
    self.basename = None
    (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
    self.date = self.getSRCDate(d)
    self.url = url
    # URL parameters may carry credentials the URL itself lacks.
    if not self.user and "user" in self.parm:
        self.user = self.parm["user"]
    if not self.pswd and "pswd" in self.parm:
        self.pswd = self.parm["pswd"]
    self.setup = False

    # Checksum flag names are namespaced by the 'name' parameter so that
    # multi-entry SRC_URIs can carry per-entry checksums.
    if "name" in self.parm:
        self.md5_name = "%s.md5sum" % self.parm["name"]
        self.sha256_name = "%s.sha256sum" % self.parm["name"]
    else:
        self.md5_name = "md5sum"
        self.sha256_name = "sha256sum"
    # Expected checksums: URL parameter wins, else the SRC_URI var flag.
    if self.md5_name in self.parm:
        self.md5_expected = self.parm[self.md5_name]
    else:
        self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
    if self.sha256_name in self.parm:
        self.sha256_expected = self.parm[self.sha256_name]
    else:
        self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)

    self.names = self.parm.get("name", 'default').split(',')

    # Pick the first registered fetch method that claims this URL.
    self.method = None
    for m in methods:
        if m.supports(url, self, d):
            self.method = m
            break

    if not self.method:
        raise NoMethodError(url)

    if hasattr(self.method, "urldata_init"):
        self.method.urldata_init(self, d)

    if "localpath" in self.parm:
        # if user sets localpath for file, use it instead.
        self.localpath = self.parm["localpath"]
        self.basename = os.path.basename(self.localpath)
    elif self.localfile:
        self.localpath = self.method.localpath(self.url, self, d)

    # Note: These files should always be in DL_DIR whereas localpath may not be.
    basepath = d.expand("${DL_DIR}/%s" % os.path.basename(self.localpath or self.basename))
    self.donestamp = basepath + '.done'
    self.lockfile = basepath + '.lock'
def urldata_init(self, ud, d):
    """Derive the local file name for a plain download URL.

    An explicit 'downloadfilename' parameter overrides the basename of
    the URL path; the result is URL-unquoted and variable-expanded.
    """
    ud.basename = (ud.parm['downloadfilename']
                   if 'downloadfilename' in ud.parm
                   else os.path.basename(ud.path))
    ud.localfile = data.expand(urllib.unquote(ud.basename), d)
def localpath(self, url, ud, d):
    """Work out the local tarball name for a git URL and return its
    absolute path under DL_DIR (or None when 'noclone' is set).

    Also primes *ud* with proto, branch, tag, clonedir and mirror
    tarball names used later by the fetch step.
    """
    # Protocol: explicit parameter wins; no host means a local repo;
    # otherwise this (old) fetcher defaults to rsync.
    if 'protocol' in ud.parm:
        ud.proto = ud.parm['protocol']
    elif not ud.host:
        ud.proto = 'file'
    else:
        ud.proto = "rsync"

    ud.branch = ud.parm.get("branch", "master")

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
    ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
    ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    # Resolve the tag/revision; a literal True from the helper means
    # "use the latest upstream revision".
    tag = Fetch.srcrev_internal_helper(ud, d)
    if tag is True:
        ud.tag = self.latest_revision(url, ud, d)
    elif tag:
        ud.tag = tag

    # A missing tag or "master" is treated as unpinned: resolve to latest.
    if not ud.tag or ud.tag == "master":
        ud.tag = self.latest_revision(url, ud, d)

    # Optional 'subpath' restricts the checkout to a subdirectory and
    # participates in the local file name.
    subdir = ud.parm.get("subpath", "")
    if subdir != "":
        if subdir.endswith("/"):
            subdir = subdir[:-1]
        subdirpath = os.path.join(ud.path, subdir)
    else:
        subdirpath = ud.path

    # 'fullclone' downloads the whole repository tarball instead of a
    # per-revision checkout tarball.
    if 'fullclone' in ud.parm:
        ud.localfile = ud.mirrortarball
    else:
        ud.localfile = data.expand(
            'git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)

    ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"

    if 'noclone' in ud.parm:
        ud.localfile = None
        return None

    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def go(self, loc, ud, d):
    """Fetch url"""
    # Nothing to do when a stashed/mirrored result already exists.
    if Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
        return

    if ud.user:
        username = ud.user + '@'
    else:
        username = ""

    gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))

    repofilename = 'git_%s.tar.gz' % (gitsrcname)
    repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
    repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

    coname = '%s' % (ud.tag)
    codir = os.path.join(repodir, coname)

    # Obtain the repository: unpack a mirror tarball if one can be
    # fetched, otherwise clone (without checkout) from upstream.
    if not os.path.exists(repodir):
        if Fetch.try_mirror(d, repofilename):
            bb.mkdirhier(repodir)
            os.chdir(repodir)
            runfetchcmd("tar -xzf %s" % (repofile), d)
        else:
            runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, repodir), d)

    os.chdir(repodir)
    # Remove all but the .git directory
    # Only refetch when the wanted tag is not already present locally.
    if not self._contains_ref(ud.tag, d):
        runfetchcmd("rm * -Rf", d)
        runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
        runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
        runfetchcmd("git prune-packed", d)
        runfetchcmd("git pack-redundant --all | xargs -r rm", d)

    os.chdir(repodir)
    mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
    if mirror_tarballs != "0":
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

    # Check the wanted tag out into a scratch directory, tar that up as
    # the final download result, then discard the scratch checkout.
    if os.path.exists(codir):
        bb.utils.prunedir(codir)

    bb.mkdirhier(codir)
    os.chdir(repodir)
    runfetchcmd("git read-tree %s" % (ud.tag), d)
    runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d)

    os.chdir(codir)
    bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

    os.chdir(repodir)
    bb.utils.prunedir(codir)
def localpath(self, url, ud, d):
    """Compute the download-cache path for an svn URL.

    Requires a 'module' parameter; fills in ud.module, ud.pkgdir,
    ud.moddir, ud.revision, ud.date and ud.localfile, and returns the
    absolute tarball path under DL_DIR.

    Bug fix: the explicit-date branch tested "'date' in ud.date" -- a
    substring test on the date string itself -- instead of
    "'date' in ud.parm", so a URL's date parameter was never honoured.
    """
    if not "module" in ud.parm:
        raise MissingParameterError(
            "svn method needs a 'module' parameter")

    ud.module = ud.parm["module"]

    # Create paths to svn checkouts
    relpath = self._strip_leading_slashes(ud.path)
    ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
    ud.moddir = os.path.join(ud.pkgdir, ud.module)

    if 'rev' in ud.parm:
        # Explicit revision pins the checkout; date is irrelevant.
        ud.date = ""
        ud.revision = ud.parm['rev']
    elif 'date' in ud.parm:
        # Explicit date pins the checkout; no revision.
        ud.date = ud.parm['date']
        ud.revision = ""
    else:
        #
        # ***Nasty hack***
        # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
        # Should warn people to switch to SRCREV here
        #
        pv = data.getVar("PV", d, 0)
        if "DATE" in pv:
            ud.revision = ""
        else:
            rev = Fetch.srcrev_internal_helper(ud, d)
            if rev is True:
                ud.revision = self.latest_revision(url, ud, d)
                ud.date = ""
            elif rev:
                ud.revision = rev
                ud.date = ""
            else:
                ud.revision = ""

    ud.localfile = data.expand(
        '%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host,
                                   ud.path.replace('/', '.'), ud.revision,
                                   ud.date), d)

    return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
def _buildhgcommand(self, ud, d, command):
    """
    Build up an hg commandline based on ud
    command is "fetch", "update", "info"

    Bug fix: the "update" branch interpolated ud.user/ud.pswd into the
    --config auth options unconditionally, producing
    "auth.default.username=None" for anonymous URLs; it now only passes
    auth options when credentials are present, mirroring "pull".
    """
    basecmd = data.expand('${FETCHCMD_hg}', d)

    proto = ud.parm.get('protocol', 'http')

    host = ud.host
    if proto == "file":
        host = "/"
        ud.host = "localhost"

    # Repository root, with optional credentials embedded in the URL.
    if not ud.user:
        hgroot = host + ud.path
    else:
        if ud.pswd:
            hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
        else:
            hgroot = ud.user + "@" + host + ud.path

    if command == "info":
        return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

    options = []

    # Don't specify revision for the fetch; clone the entire repo.
    # This avoids an issue if the specified revision is a tag, because
    # the tag actually exists in the specified revision + 1, so it won't
    # be available when used in any successive commands.
    if ud.revision and command != "fetch":
        options.append("-r %s" % ud.revision)

    if command == "fetch":
        cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options),
                                             proto, hgroot, ud.module, ud.module)
    elif command == "pull":
        # do not pass options list; limiting pull to rev causes the local
        # repo not to contain it and immediately following "update" command
        # will crash
        if ud.user and ud.pswd:
            cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (
                basecmd, ud.user, ud.pswd, proto)
        else:
            cmd = "%s pull" % (basecmd)
    elif command == "update":
        if ud.user and ud.pswd:
            cmd = "%s update --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" -C %s" % (
                basecmd, ud.user, ud.pswd, proto, " ".join(options))
        else:
            cmd = "%s update -C %s" % (basecmd, " ".join(options))
    else:
        raise FetchError("Invalid hg command %s" % command, ud.url)

    return cmd
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        return

    flags = data.getVarFlags(func, d)
    # Normalise: make sure every recognised flag key exists (None if unset).
    for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
        if not item in flags:
            flags[item] = None

    ispython = flags['python']

    # Delete any directories the function declares as 'cleandirs'.
    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    # The function will run in the last 'dirs' entry, else in ${B}.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    # Save current directory
    try:
        prevdir = os.getcwd()
    except OSError:
        # cwd may already have been deleted; fall back to TOPDIR.
        prevdir = data.getVar('TOPDIR', d, True)

    # Setup logfiles
    t = data.getVar('T', d, 1)
    if not t:
        bb.msg.fatal(bb.msg.domain.Build, "T not set")
    mkdirhier(t)
    # Gross hack, FIXME
    import random
    logfile = "%s/log.%s.%s.%s" % (t, func, str(os.getpid()),random.random())
    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))

    # Change to correct directory (if specified)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        # With debugging on (or for python functions) tee the log so it is
        # also visible on the console; otherwise write it to a plain file.
        if bb.msg.debug_level['default'] > 0 or ispython:
            so = os.popen("tee \"%s\"" % logfile, "w")
        else:
            so = file(logfile, 'w')
    except OSError, e:
        bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
        pass
    # NOTE(review): the function continues past this chunk in the
    # original file (stream redirection and the actual execution); only
    # this truncated portion is visible here.
def add_tasks(tasklist, d):
    """Register *tasklist* in the '_task_deps' metadata structure.

    For each task: marks it with the 'task' flag, records it in
    task_deps['tasks'], copies its dependency-related flags
    ('depends', 'deptask', 'rdeptask', 'recrdeptask', 'nostamp') and
    records its parents from the 'deps' flag.

    Bug fix: the original iterated flags['deps'] unconditionally, which
    raised KeyError for a task with no 'deps' flag; the access is now
    guarded.
    """
    task_deps = data.getVar('_task_deps', d)
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
        task_deps['tasks'] = []
    if not 'parents' in task_deps:
        task_deps['parents'] = {}

    for task in tasklist:
        task = data.expand(task, d)
        data.setVarFlag(task, 'task', 1, d)

        if not task in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = data.getVarFlags(task, d)
        def getTask(name):
            # Copy one dependency flag (if set) into the shared structure.
            if not name in task_deps:
                task_deps[name] = {}
            if name in flags:
                deptask = data.expand(flags[name], d)
                task_deps[name][task] = deptask
        getTask('depends')
        getTask('deptask')
        getTask('rdeptask')
        getTask('recrdeptask')
        getTask('nostamp')
        task_deps['parents'][task] = []
        if 'deps' in flags:
            for dep in flags['deps']:
                dep = data.expand(dep, d)
                task_deps['parents'][task].append(dep)

    # don't assume holding a reference
    data.setVar('_task_deps', task_deps, d)
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'"""
    body = data.getVar(func, d)
    if not body:
        if body is None:
            logger.warn("Function %s doesn't exist", func)
        return

    flags = data.getVarFlags(func, d)

    # Wipe any directories the function wants cleaned before it runs.
    cleandirs = flags.get('cleandirs')
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    if dirs is None:
        dirs = flags.get('dirs')
        if dirs:
            dirs = data.expand(dirs, d).split()

    # The function runs in the last 'dirs' entry, or in ${B} by default.
    if dirs:
        for adir in dirs:
            bb.utils.mkdirhier(adir)
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)
        bb.utils.mkdirhier(adir)

    ispython = flags.get('python')

    lockflag = flags.get('lockfiles')
    lockfiles = [data.expand(f, d) for f in lockflag.split()] if lockflag else None

    tempdir = data.getVar('T', d, 1)

    # or func allows items to be executed outside of the normal
    # task set, such as buildhistory
    task = data.getVar('BB_RUNTASK', d, 1) or func
    taskfunc = task if task == func else "%s.%s" % (task, func)

    runfmt = data.getVar('BB_RUNFMT', d, 1) or "run.{func}.{pid}"
    runfile = os.path.join(
        tempdir,
        runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid()))
    bb.utils.mkdirhier(os.path.dirname(runfile))

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=adir)
        else:
            exec_func_shell(func, d, runfile, cwd=adir)
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'

    Runs *func* (python or shell) in the appropriate directory, holding
    any lockfiles the function's flags request.

    Bug fix: lock release and working-directory restoration are now in a
    try/finally, so an exception raised by the executed function no
    longer leaks held lockfiles or leaves the process chdir'd into the
    build directory.
    """
    body = data.getVar(func, d)
    if not body:
        return

    flags = data.getVarFlags(func, d)
    # Normalise: make sure every recognised flag key exists (None if unset).
    for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
        if not item in flags:
            flags[item] = None

    ispython = flags['python']

    # Delete any directories declared as 'cleandirs'.
    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()

    for adir in dirs:
        mkdirhier(adir)

    # Run in the last 'dirs' entry, falling back to ${B}.
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    try:
        prevdir = os.getcwd()
    except OSError:
        # cwd may already have been deleted; fall back to TOPDIR.
        prevdir = data.getVar('TOPDIR', d, True)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    locks = []
    lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
    for lock in lockfiles:
        locks.append(bb.utils.lockfile(lock))

    try:
        if flags['python']:
            exec_func_python(func, d)
        else:
            exec_func_shell(func, d, flags)
    finally:
        for lock in locks:
            bb.utils.unlockfile(lock)
        if os.path.exists(prevdir):
            os.chdir(prevdir)
def exec_func(func, d, dirs=None):
    """Execute an BB 'function'"""
    body = data.getVar(func, d)
    if body is None:
        logger.warn("Function %s doesn't exist", func)
        return
    if not body:
        return

    flags = data.getVarFlags(func, d)

    # Blow away any 'cleandirs' before running the function.
    cleandirs = flags.get("cleandirs")
    if cleandirs:
        for cdir in data.expand(cleandirs, d).split():
            bb.utils.remove(cdir, True)

    if dirs is None:
        dirs = flags.get("dirs")
        if dirs:
            dirs = data.expand(dirs, d).split()

    # Work out where to run: last of 'dirs', or ${B} if it exists.
    if dirs:
        for workdir in dirs:
            bb.utils.mkdirhier(workdir)
        cwd = dirs[-1]
    else:
        cwd = data.getVar("B", d, 1)
        if not os.path.exists(cwd):
            cwd = None

    ispython = flags.get("python")

    # A fakeroot function that isn't a task could never get fakeroot set up.
    if flags.get("fakeroot") and not flags.get("task"):
        bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)

    lockflag = flags.get("lockfiles")
    lockfiles = [data.expand(f, d) for f in lockflag.split()] if lockflag else None

    tempdir = data.getVar("T", d, 1)
    bb.utils.mkdirhier(tempdir)
    runfile = os.path.join(tempdir, "run.{0}.{1}".format(func, os.getpid()))

    with bb.utils.fileslocked(lockfiles):
        if ispython:
            exec_func_python(func, d, runfile, cwd=cwd)
        else:
            exec_func_shell(func, d, runfile, cwd=cwd)