def go(self, loc, ud, d): """Fetch urls""" if not self.forcefetch(loc, ud, d) and Fetch.try_mirror( d, ud.localfile): return svkroot = ud.host + ud.path # pyflakes claims date is not known... it looks right svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmppipe = os.popen( data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: bb.msg.error( bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH." ) raise FetchError(ud.module) # check out sources there os.chdir(tmpfile) bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd) myret = os.system(svkcmd) if myret != 0: try: os.rmdir(tmpfile) except OSError: pass raise FetchError(ud.module) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module) # cleanup os.system('rm -rf %s' % tmpfile)
def go(self, loc, ud, d): """Fetch urls""" svkroot = ud.host + ud.path svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module) if ud.revision: svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module) # create temp directory localdata = data.createCopy(d) data.update_data(localdata) logger.debug(2, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata) tmppipe = os.popen( data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: logger.error( "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH." ) raise FetchError(ud.module) # check out sources there os.chdir(tmpfile) logger.info("Fetch " + loc) logger.debug(1, "Running %s", svkcmd) myret = os.system(svkcmd) if myret != 0: try: os.rmdir(tmpfile) except OSError: pass raise FetchError(ud.module) os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module))) # tar them up to a defined filename myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module))) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(ud.module) # cleanup bb.utils.prunedir(tmpfile)
def _buildosccommand(self, ud, d, command): """ Build up an ocs commandline based on ud command is "fetch", "update", "info" """ basecmd = data.expand('${FETCHCMD_osc}', d) proto = "ocs" if "proto" in ud.parm: proto = ud.parm["proto"] options = [] config = "-c %s" % self.generate_config(ud, d) if ud.revision: options.append("-r %s" % ud.revision) coroot = ud.path if coroot.startswith('/'): # Remove leading slash as os.path.join can't cope coroot = coroot[1:] if command is "fetch": osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) elif command is "update": osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) else: raise FetchError("Invalid osc command %s" % command) return osccmd
def _buildosccommand(self, ud, d, command): """ Build up an ocs commandline based on ud command is "fetch", "update", "info" """ basecmd = data.expand('${FETCHCMD_osc}', d) proto = ud.parm.get('proto', 'ocs') options = [] config = "-c %s" % self.generate_config(ud, d) if ud.revision: options.append("-r %s" % ud.revision) coroot = self._strip_leading_slashes(ud.path) if command is "fetch": osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options)) elif command is "update": osccmd = "%s %s up %s" % (basecmd, config, " ".join(options)) else: raise FetchError("Invalid osc command %s" % command) return osccmd
def _buildbzrcommand(self, ud, d, command):
    """
    Build up a bzr commandline based on ud
    command is "fetch", "update", "revno"
    """

    basecmd = data.expand('${FETCHCMD_bzr}', d)

    proto = "http"
    if "proto" in ud.parm:
        proto = ud.parm["proto"]

    bzrroot = ud.host + ud.path

    options = []

    if command == "revno":
        bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
    else:
        if ud.revision:
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        elif command == "update":
            bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid bzr command %s" % command)

    return bzrcmd
def _buildsvncommand(self, ud, d, command):
    """
    Build up an svn commandline based on ud
    command is "fetch", "update", "info"
    """

    basecmd = data.expand('${FETCHCMD_svn}', d)

    proto = ud.parm.get('proto', 'svn')

    svn_rsh = None
    if proto == "svn+ssh" and "rsh" in ud.parm:
        svn_rsh = ud.parm["rsh"]

    svnroot = ud.host + ud.path

    # either use the revision, or SRCDATE in braces
    options = []

    if ud.user:
        options.append("--username %s" % ud.user)

    if ud.pswd:
        options.append("--password %s" % ud.pswd)

    if command == "info":
        svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
    else:
        suffix = ""
        if ud.revision:
            options.append("-r %s" % ud.revision)
            suffix = "@%s" % (ud.revision)
        elif ud.date:
            options.append("-r {%s}" % ud.date)

        if command == "fetch":
            svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
        elif command == "update":
            svncmd = "%s update %s" % (basecmd, " ".join(options))
        else:
            raise FetchError("Invalid svn command %s" % command)

    if svn_rsh:
        svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)

    return svncmd
def _buildhgcommand(self, ud, d, command):
    """
    Build up an hg commandline based on ud
    command is "fetch", "pull", "update", "info"
    """

    basecmd = data.expand('${FETCHCMD_hg}', d)

    proto = "http"
    if "proto" in ud.parm:
        proto = ud.parm["proto"]

    host = ud.host
    if proto == "file":
        host = "/"
        ud.host = "localhost"

    if not ud.user:
        hgroot = host + ud.path
    else:
        hgroot = ud.user + "@" + host + ud.path

    if command == "info":
        return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

    options = []
    if ud.revision:
        options.append("-r %s" % ud.revision)

    if command == "fetch":
        cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
    elif command == "pull":
        # do not pass options list; limiting pull to rev causes the local
        # repo not to contain it and immediately following "update" command
        # will crash
        cmd = "%s pull" % (basecmd)
    elif command == "update":
        cmd = "%s update -C %s" % (basecmd, " ".join(options))
    else:
        raise FetchError("Invalid hg command %s" % command)

    return cmd
def go(self, uri, ud, d, checkonly=False): """Fetch urls""" def fetch_uri(uri, ud, d): if checkonly: fetchcmd = data.getVar("CHECKCOMMAND", d, 1) elif os.path.exists(ud.localpath): # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) uri = uri.split(";")[0] uri_decoded = list(decodeurl(uri)) uri_type = uri_decoded[0] uri_host = uri_decoded[1] fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0]) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) logger.info("fetch " + uri) logger.debug(2, "executing " + fetchcmd) runfetchcmd(fetchcmd, d) # Sanity check since wget can pretend it succeed when it didn't # Also, this used to happen if sourceforge sent us to the mirror page if not os.path.exists(ud.localpath) and not checkonly: logger.debug( 2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath) return False return True localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) if fetch_uri(uri, ud, localdata): return True raise FetchError(uri)
def go(self, url, urldata, d):
    dldir = data.getVar('DL_DIR', d, 1)

    m = __pattern__.match(url)
    path = m.group('path')
    host = m.group('host')
    port = m.group('port')
    user = m.group('user')
    password = m.group('pass')

    ldir = os.path.join(dldir, host)
    lpath = os.path.join(ldir, os.path.basename(path))

    if not os.path.exists(ldir):
        os.makedirs(ldir)

    if port:
        port = '-P %s' % port
    else:
        port = ''

    if user:
        fr = user
        if password:
            fr += ':%s' % password
        fr += '@%s' % host
    else:
        fr = host
    fr += ':%s' % path

    import commands
    cmd = 'scp -B -r %s %s %s/' % (
        port,
        commands.mkarg(fr),
        commands.mkarg(ldir)
    )

    (exitstatus, output) = commands.getstatusoutput(cmd)
    if exitstatus != 0:
        print output
        raise FetchError('Unable to fetch %s' % url)
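# A minimal sketch, not part of the fetcher above: the 'commands' module used
# there exists only on Python 2. On Python 3 the same scp invocation could be
# expressed with subprocess, passing an argument list so no manual shell
# quoting (commands.mkarg) is needed. The helper name and parameters here are
# illustrative assumptions, not an API of this codebase.
import subprocess

def scp_fetch_sketch(host, path, ldir, port=None, user=None):
    # Build the remote source spec, e.g. user@host:/remote/path
    source = '%s@%s:%s' % (user, host, path) if user else '%s:%s' % (host, path)
    cmd = ['scp', '-B', '-r']
    if port:
        cmd += ['-P', str(port)]
    cmd += [source, ldir + '/']
    # Run scp and treat a non-zero exit status as a fetch failure
    if subprocess.call(cmd) != 0:
        raise RuntimeError('Unable to fetch %s' % source)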
def _buildhgcommand(self, ud, d, command):
    """
    Build up an hg commandline based on ud
    command is "fetch", "pull", "update", "info"
    """

    basecmd = data.expand('${FETCHCMD_hg}', d)

    proto = "http"
    if "proto" in ud.parm:
        proto = ud.parm["proto"]

    host = ud.host
    if proto == "file":
        host = "/"
        ud.host = "localhost"

    hgroot = host + ud.path

    if command == "info":
        return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)

    options = []
    if ud.revision:
        options.append("-r %s" % ud.revision)

    if command == "fetch":
        cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
    elif command == "pull":
        cmd = "%s pull %s" % (basecmd, " ".join(options))
    elif command == "update":
        cmd = "%s update -C %s" % (basecmd, " ".join(options))
    else:
        raise FetchError("Invalid hg command %s" % command)

    return cmd
def go(self, loc, ud, d):

    # try to use the tarball stash
    if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
        bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
        return

    method = "pserver"
    if "method" in ud.parm:
        method = ud.parm["method"]

    localdir = ud.module
    if "localdir" in ud.parm:
        localdir = ud.parm["localdir"]

    cvs_port = ""
    if "port" in ud.parm:
        cvs_port = ud.parm["port"]

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []
    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
        # update sources there
        os.chdir(moddir)
        myret = os.system(cvsupdatecmd)
    else:
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
        myret = os.system(cvscmd)

    if myret != 0 or not os.access(moddir, os.R_OK):
        try:
            os.rmdir(moddir)
        except OSError:
            pass
        raise FetchError(ud.module)

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        os.chdir(pkgdir)
        myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
    else:
        os.chdir(moddir)
        os.chdir('..')
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))

    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
def go(self, uri, ud, d, checkonly=False): """Fetch urls""" def fetch_uri(uri, ud, d): if checkonly: fetchcmd = data.getVar("CHECKCOMMAND", d, 1) elif os.path.exists(ud.localpath): # file exists, but we didnt complete it.. trying again.. fetchcmd = data.getVar("RESUMECOMMAND", d, 1) else: fetchcmd = data.getVar("FETCHCOMMAND", d, 1) bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri) fetchcmd = fetchcmd.replace("${URI}", uri) fetchcmd = fetchcmd.replace("${FILE}", ud.basename) bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd) ret = os.system(fetchcmd) if ret != 0: return False # Sanity check since wget can pretend it succeed when it didn't # Also, this used to happen if sourceforge sent us to the mirror page if not os.path.exists(ud.localpath): bb.msg.debug( 2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath)) return False return True localdata = data.createCopy(d) data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) premirrors = [ i.split() for i in ( data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in premirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return True if fetch_uri(uri, ud, localdata): return True # try mirrors mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] for (find, replace) in mirrors: newuri = uri_replace(uri, find, replace, d) if newuri != uri: if fetch_uri(newuri, ud, localdata): return True raise FetchError(uri)
def go(self, uri, ud, d, checkonly = False):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

        uri = uri.split(";")[0]
        uri_decoded = list(bb.decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)

        httpproxy = None
        ftpproxy = None
        if uri_type == 'http':
            httpproxy = data.getVar("HTTP_PROXY", d, True)
            httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
            for p in httpproxy_ignore:
                if uri_host.endswith(p):
                    httpproxy = None
                    break
        if uri_type == 'ftp':
            ftpproxy = data.getVar("FTP_PROXY", d, True)
            ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
            for p in ftpproxy_ignore:
                if uri_host.endswith(p):
                    ftpproxy = None
                    break
        if httpproxy:
            fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
        if ftpproxy:
            fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
            return False

        return True

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    premirrors = [i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i]
    for (find, replace) in premirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    if fetch_uri(uri, ud, localdata):
        return True

    # try mirrors
    mirrors = [i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i]
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    raise FetchError(uri)
def go(self, loc, ud, d): """ Fetch urls """ (host, depot, user, pswd, parm) = Perforce.doparse(loc, d) if depot.find('/...') != -1: path = depot[:depot.find('/...')] else: path = depot module = parm.get('module', os.path.basename(path)) localdata = data.createCopy(d) data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) # Get the p4 command p4opt = "" if user: p4opt += " -u %s" % (user) if pswd: p4opt += " -P %s" % (pswd) if host: p4opt += " -p %s" % (host) p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) # create temp directory logger.debug(2, "Fetch: creating temporary directory") bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") raise FetchError(module) if "label" in parm: depot = "%s@%s" % (depot, parm["label"]) else: cset = Perforce.getcset(d, depot, host, user, pswd, parm) depot = "%s@%s" % (depot, cset) os.chdir(tmpfile) logger.info("Fetch " + loc) logger.info("%s%s files %s", p4cmd, p4opt, depot) p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot)) if not p4file: logger.error("Fetch: unable to get the P4 files from %s", depot) raise FetchError(module) count = 0 for file in p4file: list = file.split() if list[2] == "delete": continue dest = list[0][len(path)+1:] where = dest.find("#") os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0])) count = count + 1 if count == 0: logger.error("Fetch: No files gathered from the P4 fetch") raise FetchError(module) myret = os.system("tar -czf %s %s" % (ud.localpath, module)) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(module) # cleanup bb.utils.prunedir(tmpfile)
def go(self, loc, ud, d):

    method = ud.parm.get('method', 'pserver')
    localdir = ud.parm.get('localdir', ud.module)
    cvs_port = ud.parm.get('port', '')

    cvs_rsh = None
    if method == "ext":
        if "rsh" in ud.parm:
            cvs_rsh = ud.parm["rsh"]

    if method == "dir":
        cvsroot = ud.path
    else:
        cvsroot = ":" + method
        cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
        if cvsproxyhost:
            cvsroot += ";proxy=" + cvsproxyhost
        cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
        if cvsproxyport:
            cvsroot += ";proxyport=" + cvsproxyport
        cvsroot += ":" + ud.user
        if ud.pswd:
            cvsroot += ":" + ud.pswd
        cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

    options = []
    if 'norecurse' in ud.parm:
        options.append("-l")
    if ud.date:
        # treat YYYYMMDDHHMM specially for CVS
        if len(ud.date) == 12:
            options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
        else:
            options.append("-D \"%s UTC\"" % ud.date)
    if ud.tag:
        options.append("-r %s" % ud.tag)

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    data.setVar('CVSROOT', cvsroot, localdata)
    data.setVar('CVSCOOPTS', " ".join(options), localdata)
    data.setVar('CVSMODULE', ud.module, localdata)
    cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
    cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

    if cvs_rsh:
        cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
        cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

    # create module directory
    logger.debug(2, "Fetch: checking for module directory")
    pkg = data.expand('${PN}', d)
    pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
    moddir = os.path.join(pkgdir, localdir)
    if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
        logger.info("Update " + loc)
        # update sources there
        os.chdir(moddir)
        myret = os.system(cvsupdatecmd)
    else:
        logger.info("Fetch " + loc)
        # check out sources there
        bb.mkdirhier(pkgdir)
        os.chdir(pkgdir)
        logger.debug(1, "Running %s", cvscmd)
        myret = os.system(cvscmd)

    if myret != 0 or not os.access(moddir, os.R_OK):
        try:
            os.rmdir(moddir)
        except OSError:
            pass
        raise FetchError(ud.module)

    scmdata = ud.parm.get("scmdata", "")
    if scmdata == "keep":
        tar_flags = ""
    else:
        tar_flags = "--exclude 'CVS'"

    # tar them up to a defined filename
    if 'fullpath' in ud.parm:
        os.chdir(pkgdir)
        myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
    else:
        os.chdir(moddir)
        os.chdir('..')
        myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))

    if myret != 0:
        try:
            os.unlink(ud.localpath)
        except OSError:
            pass
        raise FetchError(ud.module)
def go(self, loc, ud, d): """ Fetch urls """ # try to use the tarball stash if Fetch.try_mirror(d, ud.localfile): bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath) return (host,depot,user,pswd,parm) = Perforce.doparse(loc, d) if depot.find('/...') != -1: path = depot[:depot.find('/...')] else: path = depot if "module" in parm: module = parm["module"] else: module = os.path.basename(path) localdata = data.createCopy(d) data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata) data.update_data(localdata) # Get the p4 command if user: data.setVar('P4USER', user, localdata) if pswd: data.setVar('P4PASSWD', pswd, localdata) if host: data.setVar('P4PORT', host, localdata) p4cmd = data.getVar('FETCHCOMMAND', localdata, 1) # create temp directory bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory") bb.mkdirhier(data.expand('${WORKDIR}', localdata)) data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata) tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") tmpfile = tmppipe.readline().strip() if not tmpfile: bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") raise FetchError(module) if "label" in parm: depot = "%s@%s" % (depot,parm["label"]) else: cset = Perforce.getcset(d, depot, host, user, pswd, parm) depot = "%s@%s" % (depot,cset) os.chdir(tmpfile) bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc) bb.msg.note(1, bb.msg.domain.Fetcher, "%s files %s" % (p4cmd, depot)) p4file = os.popen("%s files %s" % (p4cmd, depot)) if not p4file: bb.error("Fetch: unable to get the P4 files from %s" % (depot)) raise FetchError(module) count = 0 for file in p4file: list = file.split() if list[2] == "delete": continue dest = list[0][len(path)+1:] where = dest.find("#") os.system("%s print -o %s/%s %s" % (p4cmd, module,dest[:where],list[0])) count = count + 1 if count == 0: bb.error("Fetch: No files gathered from the P4 fetch") raise FetchError(module) myret = os.system("tar -czf %s %s" % (ud.localpath, module)) if myret != 0: try: os.unlink(ud.localpath) except OSError: pass raise FetchError(module) # cleanup os.system('rm -rf %s' % tmpfile)