def download(self, uri, ud, d, checkonly = False):
    """Fetch urls"""

    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
    else:
        fetchcmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")

    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def fetch_uri(uri, ud, d, checkonly=False):
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, True)
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, True)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, True)

    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
def download(self, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        updatecmd = self._buildhgcommand(ud, d, "pull")
        logger.info("Update " + ud.url)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        bb.fetch2.check_network_access(d, updatecmd, ud.url)
        runfetchcmd(updatecmd, d)
    else:
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + ud.url)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd, ud.url)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't checkout the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)
def build_mirror_data(self, url, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
        os.chdir(ud.clonedir)
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".")), d)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d)
def download(self, uri, ud, d, checkonly = False):
    """Fetch urls"""

    basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    if not checkonly and 'downloadfilename' in ud.parm:
        dldir = d.getVar("DL_DIR", True)
        bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
        basecmd += " -O " + dldir + os.sep + ud.localfile

    if checkonly:
        fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it.. trying again..
        fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
    else:
        fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

    uri = uri.split(";")[0]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def download(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
        svnupdatecmd = self._buildsvncommand(ud, d, "update")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", svnupdatecmd)
        bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
        runfetchcmd(svnupdatecmd, d)
    else:
        svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", svnfetchcmd)
        bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
        runfetchcmd(svnfetchcmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.svn'"

    os.chdir(ud.pkgdir)
    # tar them up to a defined filename
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
def download(self, loc, ud, d):
    """Fetch urls"""

    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmpfile.strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup = [tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def _remove_view(self, ud, d):
    if os.path.exists(ud.viewdir):
        cmd = self._build_ccase_command(ud, 'rmview')
        logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
        logger.info("rmview output: %s", output)
def download(self, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
        svnupdatecmd = self._buildsvncommand(ud, d, "update")
        logger.info("Update " + ud.url)
        # We need to attempt to run svn upgrade first in case it's an older working format
        try:
            runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
        except FetchError:
            pass
        logger.debug(1, "Running %s", svnupdatecmd)
        bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
        runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
    else:
        svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
        logger.info("Fetch " + ud.url)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        logger.debug(1, "Running %s", svnfetchcmd)
        bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
        runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude='.svn'"

    # tar them up to a defined filename
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
def build_mirror_data(self, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".")), d, workdir=ud.clonedir)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
def checkstatus(self, fetch, urldata, d):
    """
    Check the status of the url
    """
    if urldata.localpath.find("*") != -1:
        logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
        return True
    if os.path.exists(urldata.localpath):
        return True
    return False
def build_mirror_data(self, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        os.chdir(ud.pkgdir)
        logger.info("Creating tarball of hg repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d)
def build_mirror_data(self, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        dldir = d.getVar("DL_DIR", True)
        logger.info("Creating tarball of npm data")
        runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, workdir=dldir)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
def build_mirror_data(self, url, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        os.chdir(ud.clonedir)
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".")), d)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d)
def download(self, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    # If the checkout doesn't exist and the mirror tarball does, extract it
    if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        # Found the source, check whether need pull
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        try:
            runfetchcmd(updatecmd, d)
        except bb.fetch2.FetchError:
            # Running pull in the repo
            pullcmd = self._buildhgcommand(ud, d, "pull")
            logger.info("Pulling " + ud.url)
            # update sources there
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", pullcmd)
            bb.fetch2.check_network_access(d, pullcmd, ud.url)
            runfetchcmd(pullcmd, d)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

    # No source found, clone it.
    if not os.path.exists(ud.moddir):
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + ud.url)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd, ud.url)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't checkout the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)
def download(self, ud, d):
    """Fetch url"""

    logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

    lf = bb.utils.lockfile(ud.svnlock)

    try:
        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svncmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # We need to attempt to run svn upgrade first in case it's an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
            except FetchError:
                pass
            logger.debug("Running %s", svncmd)
            bb.fetch2.check_network_access(d, svncmd, ud.url)
            runfetchcmd(svncmd, d, workdir=ud.moddir)
        else:
            svncmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug("Running %s", svncmd)
            bb.fetch2.check_network_access(d, svncmd, ud.url)
            runfetchcmd(svncmd, d, workdir=ud.pkgdir)

        if not ("externals" in ud.parm and ud.parm["externals"] == "nowarn"):
            # Warn the user if this had externals (won't catch them all)
            output = runfetchcmd("svn propget svn:externals || true", d, workdir=ud.moddir)
            if output:
                if "--ignore-externals" in svncmd.split():
                    bb.warn("%s contains svn:externals." % ud.url)
                    bb.warn("These should be added to the recipe SRC_URI as necessary.")
                    bb.warn("svn fetch has ignored externals:\n%s" % output)
                    bb.warn("To disable this warning add ';externals=nowarn' to the url.")
                else:
                    bb.debug(1, "svn repository has externals:\n%s" % output)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.svn'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
    finally:
        bb.utils.unlockfile(lf)
def build_mirror_data(self, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and not os.path.exists(ud.fullmirror):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        dldir = d.getVar("DL_DIR")
        logger.info("Creating tarball of npm data")
        runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, workdir=dldir)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
def build_mirror_data(self, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
        # it's possible that this symlink points to read-only filesystem with PREMIRROR
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        logger.info("Creating tarball of hg repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
        runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)
def build_mirror_data(self, ud, d):

    # Create as a temp file and move atomically into position to avoid races
    @contextmanager
    def create_atomic(filename):
        fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
        try:
            yield tfile
            umask = os.umask(0o666)
            os.umask(umask)
            os.chmod(tfile, (0o666 & ~umask))
            os.rename(tfile, filename)
        finally:
            os.close(fd)

    if ud.shallow and ud.write_shallow_tarballs:
        if not os.path.exists(ud.fullshallow):
            if os.path.islink(ud.fullshallow):
                os.unlink(ud.fullshallow)
            tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
            shallowclone = os.path.join(tempdir, 'git')
            try:
                self.clone_shallow_local(ud, shallowclone, d)

                logger.info("Creating tarball of git repository")
                with create_atomic(ud.fullshallow) as tfile:
                    runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
                runfetchcmd("touch %s.done" % ud.fullshallow, d)
            finally:
                bb.utils.remove(tempdir, recurse=True)
    elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        logger.info("Creating tarball of git repository")
        with create_atomic(ud.fullmirror) as tfile:
            mtime = runfetchcmd("git log --all -1 --format=%cD", d, quiet=True, workdir=ud.clonedir)
            runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ." % (tfile, mtime), d, workdir=ud.clonedir)
        runfetchcmd("touch %s.done" % ud.fullmirror, d)
def download(self, ud, d):
    """Fetch packages"""

    fetchcmd = (d.getVar("NPM", True) or "npm") + " "
    fetchcmd += d.getVar("NPM_ARCHFLAGS", True) or ""
    fetchcmd += " install " + ud.fetchname
    fetchcmd += " --force"
    if not os.path.exists(ud.installdir):
        bb.utils.mkdirhier(ud.installdir)
    os.chdir(ud.installdir)
    with open("package.json", 'w') as f:
        f.write("{}\n")
    logger.info("npm install " + ud.url)
    logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=False)
    return True
def download(self, uri, ud, d, checkonly=False):
    """Fetch urls"""

    basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    if 'downloadfilename' in ud.parm:
        basecmd += " -O ${DL_DIR}/" + ud.localfile

    if checkonly:
        fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it.. trying again..
        fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
    else:
        fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

    uri = uri.split(";")[0]
    uri_decoded = list(decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    return True
def _write_configspec(self, ud, d):
    """
    Create config spec file (ud.configspecfile) for ccase view
    """
    config_spec = ""
    custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
    if custom_config_spec is not None:
        for line in custom_config_spec.split("\\n"):
            config_spec += line + "\n"
        bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
    else:
        config_spec += "element * CHECKEDOUT\n"
        config_spec += "element * %s\n" % ud.label
        config_spec += "load %s%s\n" % (ud.vob, ud.module)

    logger.info("Using config spec: \n%s" % config_spec)

    with open(ud.configspecfile, 'w') as f:
        f.write(config_spec)
def download(self, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    lf = bb.utils.lockfile(ud.svnlock)

    try:
        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # We need to attempt to run svn upgrade first in case it's an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.svn'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
    finally:
        bb.utils.unlockfile(lf)
def download(self, ud, d):
    """Fetch packages"""

    logger.info("npm install URL: " + ud.url)
    fetchcmd = ud.npmcmd + " "
    fetchcmd += d.getVar("NPM_ARCHFLAGS", True) or ""
    fetchcmd += " install " + ud.fetchname
    fetchcmd += " --force"
    bb.fetch2.check_network_access(d, fetchcmd)
    if not os.path.exists(ud.installdir):
        bb.utils.mkdirhier(ud.installdir)
    logger.debug(2, "npm install dir: " + ud.installdir)
    logger.debug(2, "npm cmd: " + fetchcmd)
    npm_version = subprocess.check_output([ud.npmcmd, "-v"], stderr=subprocess.STDOUT).strip()
    logger.debug(2, "npm version: " + npm_version)
    os.chdir(ud.installdir)
    with open("package.json", 'w') as f:
        f.write("{}\n")
    runfetchcmd(fetchcmd, d, quiet=False)
    return True
def download(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        updatecmd = self._buildhgcommand(ud, d, "pull")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        bb.fetch2.check_network_access(d, updatecmd, ud.url)
        runfetchcmd(updatecmd, d)
    else:
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.utils.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd, ud.url)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't checkout the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.hg' --exclude '.hgtags'"

    os.chdir(ud.pkgdir)
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup=[ud.localpath])
def download(self, loc, ud, d):
    """Fetch urls"""

    svkroot = ud.host + ud.path

    svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

    if ud.revision:
        svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

    # create temp directory
    localdata = data.createCopy(d)
    data.update_data(localdata)
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
    data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
    tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    # check out sources there
    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.debug(1, "Running %s", svkcmd)
    runfetchcmd(svkcmd, d, cleanup=[tmpfile])

    os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
    # tar them up to a defined filename
    runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup=[ud.localpath])

    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, ud, d):
    """Fetch url"""

    # Make a fresh view
    bb.utils.mkdirhier(ud.ccasedir)
    self._write_configspec(ud, d)
    cmd = self._build_ccase_command(ud, 'mkview')
    logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
    bb.fetch2.check_network_access(d, cmd, ud.url)
    try:
        runfetchcmd(cmd, d)
    except FetchError as e:
        if "CRCLI2008E" in e.msg:
            raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
        else:
            raise e

    # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
    cmd = self._build_ccase_command(ud, 'setcs')
    logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
    bb.fetch2.check_network_access(d, cmd, ud.url)
    output = runfetchcmd(cmd, d, workdir=ud.viewdir)
    logger.info("%s", output)

    # Copy the configspec to the viewdir so we have it in our source tarball later
    shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

    # Clean clearcase meta-data before tar
    runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup=[ud.localpath])

    # Clean up so we can create a new view next time
    self.clean(ud, d)
def download(self, loc, ud, d):
    """ do fetch """
    # if the package has been downloaded, just return
    if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
        logger.debug(1, "%s already exists (or was stashed). Skipping gclient sync.", ud.localpath)
        return

    depot_dir = data.getVar("DEPOTDIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "depot")
    sync_dir = os.path.join(depot_dir, ud.packname)

    bb.utils.mkdirhier(sync_dir)
    os.chdir(sync_dir)

    if not os.path.exists(os.path.join(sync_dir, ".gclient")):
        logger.info('First sync of this depot, configuring it as http://%s%s' % (ud.host, ud.path))
        runfetchcmd('gclient config http://%s%s' % (ud.host, ud.path), d)

    logger.info('Starting to sync source code..')
    runfetchcmd('gclient fetch --jobs %s' % ud.njobs, d)

    logger.info('Creating tarball %s.' % ud.localfile)
    runfetchcmd('tar --exclude .svn --exclude .git -czf %s ./' % os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), d)
def download(self, loc, ud, d):
    """Fetch url"""

    logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

    if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
        updatecmd = self._buildhgcommand(ud, d, "pull")
        logger.info("Update " + loc)
        # update sources there
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        bb.fetch2.check_network_access(d, updatecmd)
        runfetchcmd(updatecmd, d)
    else:
        fetchcmd = self._buildhgcommand(ud, d, "fetch")
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(ud.pkgdir)
        os.chdir(ud.pkgdir)
        logger.debug(1, "Running %s", fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d)

    # Even when we clone (fetch), we still need to update as hg's clone
    # won't checkout the specified revision if it's on a branch
    updatecmd = self._buildhgcommand(ud, d, "update")
    os.chdir(ud.moddir)
    logger.debug(1, "Running %s", updatecmd)
    runfetchcmd(updatecmd, d)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude '.hg' --exclude '.hgtags'"

    os.chdir(ud.pkgdir)
    runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
def build_mirror_data(self, ud, d):
    if ud.shallow and ud.write_shallow_tarballs:
        if not os.path.exists(ud.fullshallow):
            if os.path.islink(ud.fullshallow):
                os.unlink(ud.fullshallow)
            tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
            shallowclone = os.path.join(tempdir, 'git')
            try:
                self.clone_shallow_local(ud, shallowclone, d)

                logger.info("Creating tarball of git repository")
                runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
                runfetchcmd("touch %s.done" % ud.fullshallow, d)
            finally:
                bb.utils.remove(tempdir, recurse=True)
    elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
        runfetchcmd("touch %s.done" % ud.fullmirror, d)
def download(self, ud, d, checkonly = False):
    """Fetch urls"""

    basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

    if not checkonly and 'downloadfilename' in ud.parm:
        dldir = d.getVar("DL_DIR", True)
        bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
        basecmd += " -O " + dldir + os.sep + ud.localfile

    if checkonly:
        fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
    elif os.path.exists(ud.localpath):
        # file exists, but we didn't complete it.. trying again..
        fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
    else:
        fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

    uri = ud.url.split(";")[0]

    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    if not checkonly:
        logger.info("fetch " + uri)
        logger.debug(2, "executing " + fetchcmd)
    bb.fetch2.check_network_access(d, fetchcmd)
    runfetchcmd(fetchcmd, d, quiet=checkonly)

    # Sanity check since wget can pretend it succeeded when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

    if not checkonly and os.path.getsize(ud.localpath) == 0:
        os.remove(ud.localpath)
        raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

    return True
def build_mirror_data(self, ud, d):
    if ud.shallows and ud.write_shallow_tarballs:
        if not os.path.exists(ud.fullshallow):
            if os.path.islink(ud.fullshallow):
                os.unlink(ud.fullshallow)
            tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR', True))
            shallowclone = os.path.join(tempdir, 'git')
            try:
                repourl = self._get_repo_url(ud)
                branchinfo = dict((name, (ud.shallows[name], ud.revisions[name], ud.branches[name])) for name in ud.names)
                self._populate_shallowclone(repourl, ud.clonedir, shallowclone, ud.basecmd, branchinfo, ud.nobranch, ud.allbranches, ud.bareclone, d)

                logger.info("Creating tarball of git repository")
                runfetchcmd("tar -czf %s %s" % (ud.fullshallow, os.path.join(".")), d)
                runfetchcmd("touch %s.done" % ud.fullshallow, d)
            finally:
                bb.utils.remove(tempdir, recurse=True)
    elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
        if os.path.islink(ud.fullmirror):
            os.unlink(ud.fullmirror)

        os.chdir(ud.clonedir)
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".")), d)
        runfetchcmd("touch %s.done" % ud.fullmirror, d)
def download(self, loc, ud, d):
    """
    Fetch urls
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

    if depot.find("/...") != -1:
        path = depot[:depot.find("/...")]
    else:
        path = depot

    module = parm.get("module", os.path.basename(path))

    localdata = data.createCopy(d)
    data.setVar("OVERRIDES", "p4:%s" % data.getVar("OVERRIDES", localdata), localdata)
    data.update_data(localdata)

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = data.getVar("FETCHCOMMAND", localdata, True)

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(data.expand("${WORKDIR}", localdata))
    data.setVar("TMPBASE", data.expand("${WORKDIR}/oep4.XXXXXX", localdata), localdata)
    tmppipe = os.popen(data.getVar("MKTEMPDIRCMD", localdata, True) or "false")
    tmpfile = tmppipe.readline().strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + loc)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)

    count = 0

    for file in p4file:
        list = file.split()

        if list[2] == "delete":
            continue

        dest = list[0][len(path) + 1:]
        where = dest.find("#")

        os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
        count = count + 1

    if count == 0:
        raise FetchError("Fetch: No files gathered from the P4 fetch", loc)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup=[ud.localpath])
    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, ud, d):
    """
    Fetch urls
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot

    module = parm.get('module', os.path.basename(path))

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(d.expand('${WORKDIR}'))
    mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
    tmpfile, errors = bb.process.run(mktemp)
    tmpfile = tmpfile.strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
    p4file = [f.rstrip() for f in p4file.splitlines()]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

    count = 0

    for file in p4file:
        list = file.split()

        if list[2] == "delete":
            continue

        dest = list[0][len(path)+1:]
        where = dest.find("#")

        subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
        count = count + 1

    if count == 0:
        raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
    # cleanup
    bb.utils.prunedir(tmpfile)
def download(self, ud, d):

    method = ud.parm.get('method', 'pserver')
    localdir = ud.parm.get('localdir', ud.module)
    cvs_port = ud.parm.get('port', '')

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = d.getVar('CVS_PROXY_HOST')
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = d.getVar('CVS_PROXY_PORT')
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []
    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    cvsbasecmd = d.getVar("FETCHCMD_cvs") or "/usr/bin/env cvs"
    cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
    cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    logger.debug(2, "Fetch: checking for module directory")
    pkg = d.getVar('PN')
    cvsdir = d.getVar("CVSDIR") or (d.getVar("DL_DIR") + "/cvs")
    pkgdir = os.path.join(cvsdir, pkg)
    moddir = os.path.join(pkgdir, localdir)
    workdir = None
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        logger.info("Update " + ud.url)
        bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
        # update sources there
        workdir = moddir
        cmd = cvsupdatecmd
    else:
        logger.info("Fetch " + ud.url)
        # check out sources there
        bb.utils.mkdirhier(pkgdir)
        workdir = pkgdir
        logger.debug(1, "Running %s", cvscmd)
        bb.fetch2.check_network_access(d, cvscmd, ud.url)
        cmd = cvscmd

    runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)

    if not os.access(moddir, os.R_OK):
        raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude='CVS'"

    # tar them up to a defined filename
    workdir = None
    if 'fullpath' in ud.parm:
        workdir = pkgdir
        cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
    else:
        workdir = os.path.dirname(os.path.realpath(moddir))
        cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

    runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
def build_mirror_data(self, url, ud, d):
    # Generate a mirror tarball if needed
    if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
        os.chdir(ud.clonedir)
        logger.info("Creating tarball of git repository")
        runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".")), d)
def download(self, loc, ud, d):

    method = ud.parm.get('method', 'pserver')
    localdir = ud.parm.get('localdir', ud.module)
    cvs_port = ud.parm.get('port', '')

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []
    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    logger.debug(2, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        logger.info("Update " + loc)
        bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
        # update sources there
        os.chdir(moddir)
        cmd = cvsupdatecmd
    else:
        logger.info("Fetch " + loc)
        # check out sources there
        bb.utils.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        logger.debug(1, "Running %s", cvscmd)
        bb.fetch2.check_network_access(d, cvscmd, ud.url)
        cmd = cvscmd

    runfetchcmd(cmd, d, cleanup = [moddir])

    if not os.access(moddir, os.R_OK):
        raise FetchError("Directory %s was not readable despite successful fetch?!" % moddir, ud.url)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude 'CVS'"

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        os.chdir(pkgdir)
        cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
    else:
        os.chdir(moddir)
        os.chdir('..')
        cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

    runfetchcmd(cmd, d, cleanup = [ud.localpath])
def download(self, ud, d):
    """
    Fetch urls
    """

    (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

    if depot.find('/...') != -1:
        path = depot[:depot.find('/...')]
    else:
        path = depot[:depot.rfind('/')]

    module = parm.get('module', os.path.basename(path))

    # Get the p4 command
    p4opt = ""
    if user:
        p4opt += " -u %s" % (user)

    if pswd:
        p4opt += " -P %s" % (pswd)

    if host:
        p4opt += " -p %s" % (host)

    p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

    # create temp directory
    logger.debug(2, "Fetch: creating temporary directory")
    bb.utils.mkdirhier(d.expand('${WORKDIR}'))
    mktemp = d.getVar("FETCHCMD_p4mktemp", True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
    tmpfile, errors = bb.process.run(mktemp)
    tmpfile = tmpfile.strip()
    if not tmpfile:
        raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", ud.url)

    if "label" in parm:
        depot = "%s@%s" % (depot, parm["label"])
    else:
        cset = Perforce.getcset(d, depot, host, user, pswd, parm)
        depot = "%s@%s" % (depot, cset)

    os.chdir(tmpfile)
    logger.info("Fetch " + ud.url)
    logger.info("%s%s files %s", p4cmd, p4opt, depot)
    p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
    p4file = [f.rstrip() for f in p4file.splitlines()]

    if not p4file:
        raise FetchError("Fetch: unable to get the P4 files from %s" % depot, ud.url)

    count = 0

    for file in p4file:
        list = file.split()

        if list[2] == "delete":
            continue

        dest = list[0][len(path) + 1:]
        where = dest.find("#")

        subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
        count = count + 1

    if count == 0:
        raise FetchError("Fetch: No files gathered from the P4 fetch", ud.url)

    runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup=[ud.localpath])
    # cleanup
    bb.utils.prunedir(tmpfile)