def uri_replace(uri, uri_find, uri_replace, d):
    """Rewrite *uri* by matching each decoded component of *uri_find*
    against it and substituting the corresponding component of
    *uri_replace*.

    Returns the rewritten, re-encoded URI; returns *uri* unchanged when
    any string component fails to match or when any argument is
    undefined.
    """
    import re
    import types
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        # Bail out here: the original fell through and crashed decoding None.
        return uri
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    # enumerate() gives the true position of each component; list.index()
    # returns the FIRST occurrence and picks the wrong slot when components
    # repeat (e.g. empty user and password strings are both "").
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        if type(i) == types.StringType:
            if re.match(i, uri_decoded[loc]):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if loc == 2 and d:
                    # Path component: keep the replacement's directory but
                    # reuse the local file name of the original URI.
                    localfn = bb.fetch.localpath(uri, d)
                    if localfn:
                        result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(localfn)
            else:
                # A non-matching component means this mapping does not
                # apply to the URI at all.
                return uri
        # else:
        #     FIXME: apply replacements against the parameter dictionary.
    return bb.encodeurl(result_decoded)
def uri_replace(uri, uri_find, uri_replace, d):
    """Rewrite *uri* by matching each decoded component of *uri_find*
    against it and substituting the corresponding component of
    *uri_replace*.

    Returns the rewritten, re-encoded URI; returns *uri* unchanged when
    any string component fails to match or when any argument is
    undefined.
    """
    # Import re locally: this variant used re without importing it.
    import re
    import types
    # bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        # Bail out here: the original fell through and crashed decoding None.
        return uri
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['', '', '', '', '', {}]
    # enumerate() gives the true position of each component; list.index()
    # returns the FIRST occurrence and picks the wrong slot when components
    # repeat (e.g. empty user and password strings are both "").
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        if type(i) == types.StringType:
            if re.match(i, uri_decoded[loc]):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if loc == 2 and d:
                    # Path component: keep the replacement's directory but
                    # reuse the local file name of the original URI.
                    localfn = bb.fetch.localpath(uri, d)
                    if localfn:
                        result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(localfn)
            else:
                # A non-matching component means this mapping does not
                # apply to the URI at all.
                return uri
        # else:
        #     FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
def localpath(url, d):
    """Return the tarball path under DL_DIR where an svn checkout of
    *url* is cached."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # An explicit localpath parameter overrides everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("svn method needs a 'module' parameter")
    module = parm["module"]
    revision = parm.get("rev", "")
    date = parm.get("srcdate", Fetch.getSRCDate(d))
    if revision:
        # A pinned revision makes the date irrelevant.
        date = ""
    tarball = "%s_%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, path.replace("/", "."), revision, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarball, d))
def localpath(url, d):
    """Return the tarball path under DL_DIR where a cvs checkout of
    *url* is cached."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d))
    # An explicit localpath parameter overrides everything else.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("cvs method needs a 'module' parameter")
    module = parm["module"]
    tag = parm.get("tag", "")
    if "date" in parm:
        date = parm["date"]
    elif tag:
        # A tag pins the checkout, so no date is used.
        date = ""
    else:
        date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1)
    tarball = bb.data.expand("%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date), d)
    return os.path.join(bb.data.getVar("DL_DIR", d, 1), tarball)
def go(self, d, urls = []):
    """Fetch urls"""
    # Maintains a local mirror clone per remote repository under
    # ${GITDIR}, then produces two tarballs in DL_DIR: one of the bare
    # repository (repofile) and one of the checked-out tree for the
    # requested tag (cofile).  Works by chdir + shelling out via
    # rungitcmd(); not safe to run concurrently on the same repodir.
    if not urls:
        urls = self.urls
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
        tag = gettag(parm)
        proto = getprotocol(parm)
        # Mangle host+path into a flat name usable as a directory/tarball.
        gitsrcname = '%s%s' % (host, path.replace('/', '.'))
        repofilename = 'git_%s.tar.gz' % (gitsrcname)
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
        coname = '%s' % (tag)
        codir = os.path.join(repodir, coname)
        cofile = self.localpath(loc, d)
        # tag=="master" must always update
        # NOTE(review): try_mirror is given localfile(loc, d) (the
        # module-level helper), not cofile — confirm they agree.
        if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % cofile)
            continue
        if not os.path.exists(repodir):
            # Seed the mirror from a stashed tarball when available,
            # otherwise clone from scratch (-n: defer the checkout).
            if Fetch.try_mirror(d, repofilename):
                bb.mkdirhier(repodir)
                os.chdir(repodir)
                rungitcmd("tar -xzf %s" % (repofile),d)
            else:
                rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)
        os.chdir(repodir)
        # Refresh branches and tags, then compact the object store.
        rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
        rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
        rungitcmd("git prune-packed", d)
        rungitcmd("git pack-redundant --all | xargs -r rm", d)
        # Remove all but the .git directory
        rungitcmd("rm * -Rf", d)
        # old method of downloading tags
        #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
        os.chdir(repodir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
        rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
        # Re-create the per-tag checkout directory from a clean slate.
        if os.path.exists(codir):
            prunedir(codir)
        bb.mkdirhier(codir)
        os.chdir(repodir)
        rungitcmd("git read-tree %s" % (tag),d)
        rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
def localpath(url, d):
    """Return the local file path a plain download of *url* is stored at."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d))
    # A user-supplied localpath parameter wins outright.
    if "localpath" in parm:
        return parm["localpath"]
    # Re-encode with the parameters stripped so the basename is just the
    # remote file name.
    stripped = bb.encodeurl([type, host, path, user, pswd, {}])
    return os.path.join(bb.data.getVar("DL_DIR", d), os.path.basename(stripped))
def __init__(self, url, d):
    """Decode *url* into its components and bind the first fetch
    implementation that supports it.

    Raises:
        NoMethodError: when no registered fetcher claims the URL.
        Previously self.method was silently left unset in that case,
        producing a confusing AttributeError much later; the sibling
        constructor in this file already raises, so this one now
        matches it.
    """
    self.localfile = ""
    (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
    self.date = Fetch.getSRCDate(self, d)
    self.url = url
    self.setup = False
    for m in methods:
        if m.supports(url, self, d):
            self.method = m
            return
    raise NoMethodError("Missing implementation for url %s" % url)
def fetch_uri(uri, ud, d, checkonly=False):
    """Run the configured wget-style command to fetch *uri* into
    ud.localpath (or, with *checkonly*, merely probe it).

    Returns True on success, False on failure.  *checkonly* was
    previously read as an undefined free variable, raising NameError
    when the function was called standalone; it is now an explicit
    optional parameter (default preserves fetch behaviour).
    """
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
    # Strip URL parameters before handing the URI to the shell command.
    uri = uri.split(";")[0]
    uri_decoded = list(bb.decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]
    bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    # Honour proxy settings, minus any hosts on the ignore list.
    httpproxy = None
    ftpproxy = None
    if uri_type == 'http':
        httpproxy = data.getVar("HTTP_PROXY", d, True)
        httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
        for p in httpproxy_ignore:
            if uri_host.endswith(p):
                httpproxy = None
                break
    if uri_type == 'ftp':
        ftpproxy = data.getVar("FTP_PROXY", d, True)
        # NOTE(review): reads HTTP_PROXY_IGNORE, not FTP_PROXY_IGNORE —
        # possibly a shared ignore list, but worth confirming.
        ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
        for p in ftpproxy_ignore:
            if uri_host.endswith(p):
                ftpproxy = None
                break
    if httpproxy:
        fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
    if ftpproxy:
        fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
    bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
    ret = os.system(fetchcmd)
    if ret != 0:
        return False
    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
        return False
    return True
def localfile(url, d):
    """Return the filename to cache the checkout in"""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # A user-supplied localpath parameter takes precedence.
    if "localpath" in parm:
        return parm["localpath"]
    mangled = path.replace('/', '.')
    return data.expand('git_%s%s_%s.tar.gz' % (host, mangled, gettag(parm)), d)
def __init__(self, url, d):
    """Decode *url* into its components and pick the first registered
    fetch implementation that supports it; raise NoMethodError when
    none does."""
    self.localfile = ""
    (self.type, self.host, self.path, self.user,
     self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
    self.date = Fetch.getSRCDate(self, d)
    self.url = url
    self.setup = False
    for candidate in methods:
        if not candidate.supports(url, self, d):
            continue
        self.method = candidate
        return
    raise NoMethodError("Missing implementation for url %s" % url)
def localpath(url, d):
    """Return the path under DL_DIR where the git checkout tarball for
    *url* lives."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # Honour an explicit localpath override.
    if "localpath" in parm:
        return parm["localpath"]
    mangled_path = path.replace('/', '.')
    tarball = 'git_%s%s_%s.tar.gz' % (host, mangled_path, gettag(parm))
    localname = data.expand(tarball, d)
    dldir = data.getVar("DL_DIR", d, 1)
    return os.path.join(dldir, data.expand('%s' % (localname), d))
def fetch_uri(uri, ud, d, checkonly=False):
    """Run the configured wget-style command to fetch *uri* into
    ud.localpath (or, with *checkonly*, merely probe it).

    Returns True on success, False on failure.  *checkonly* was
    previously read as an undefined free variable, raising NameError
    when the function was called standalone; it is now an explicit
    optional parameter (default preserves fetch behaviour).
    """
    if checkonly:
        fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
    elif os.path.exists(ud.localpath):
        # file exists, but we didnt complete it.. trying again..
        fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
    else:
        fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
    # Strip URL parameters before handing the URI to the shell command.
    uri = uri.split(";")[0]
    uri_decoded = list(bb.decodeurl(uri))
    uri_type = uri_decoded[0]
    uri_host = uri_decoded[1]
    bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
    # Honour proxy settings, minus any hosts on the ignore list.
    httpproxy = None
    ftpproxy = None
    if uri_type == 'http':
        httpproxy = data.getVar("HTTP_PROXY", d, True)
        httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
        for p in httpproxy_ignore:
            if uri_host.endswith(p):
                httpproxy = None
                break
    if uri_type == 'ftp':
        ftpproxy = data.getVar("FTP_PROXY", d, True)
        # NOTE(review): reads HTTP_PROXY_IGNORE, not FTP_PROXY_IGNORE —
        # possibly a shared ignore list, but worth confirming.
        ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
        for p in ftpproxy_ignore:
            if uri_host.endswith(p):
                ftpproxy = None
                break
    if httpproxy:
        fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
    if ftpproxy:
        fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
    bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
    ret = os.system(fetchcmd)
    if ret != 0:
        return False
    # Sanity check since wget can pretend it succeed when it didn't
    # Also, this used to happen if sourceforge sent us to the mirror page
    if not os.path.exists(ud.localpath) and not checkonly:
        bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
        return False
    return True
def localpath(url, d):
    """Return the tarball path under DL_DIR where an svk checkout of
    *url* is cached."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # User override wins outright.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("svk method needs a 'module' parameter")
    module = parm["module"]
    revision = parm.get('rev', "")
    date = Fetch.getSRCDate(d)
    tarfn = '%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarfn, d))
def initdata(url, d):
    """Return the FetchData for *url*, building and caching it on first
    use.  Entries are kept in the global ``urldata`` cache keyed by the
    recipe file (FILE) and then by URL."""
    fn = bb.data.getVar('FILE', d, 1)
    cache = urldata.setdefault(fn, {})
    if url in cache:
        return cache[url]
    ud = FetchData()
    (ud.type, ud.host, ud.path, ud.user,
     ud.pswd, ud.parm) = bb.decodeurl(data.expand(url, d))
    ud.date = Fetch.getSRCDate(ud, d)
    for m in methods:
        if not m.supports(url, ud, d):
            continue
        ud.localpath = m.localpath(url, ud, d)
        # The md5 stamp always sits next to the computed path, even when
        # the user overrides localpath just below.
        ud.md5 = ud.localpath + '.md5'
        if "localpath" in ud.parm:
            # if user sets localpath for file, use it instead.
            ud.localpath = ud.parm["localpath"]
        ud.method = m
        break
    cache[url] = ud
    return cache[url]
def localpath(url, d):
    """Return the tarball path under DL_DIR where a cvs checkout of
    *url* is cached."""
    (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d))
    # if user overrides local path, use it.
    if "localpath" in parm:
        return parm["localpath"]
    if "module" not in parm:
        raise MissingParameterError("cvs method needs a 'module' parameter")
    module = parm["module"]
    tag = parm.get('tag', "")
    if 'date' in parm:
        date = parm['date']
    elif tag:
        # A tag pins the checkout, so no date is used.
        date = ""
    else:
        date = Fetch.getSRCDate(d)
    tarfn = '%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date)
    return os.path.join(data.getVar("DL_DIR", d, 1), data.expand(tarfn, d))
def go(self, d=bb.data.init(), urls=[]):
    """Fetch urls"""
    # Checks out each cvs URL (or fetches it from the tarball stash) and
    # leaves a tarball of the module in DL_DIR.  Works via chdir plus
    # os.system, so it is not safe to run concurrently.
    # NOTE(review): the default d=bb.data.init() is evaluated once at
    # definition time and shared across calls — confirm callers always
    # pass d explicitly.
    if not urls:
        urls = self.urls
    from copy import deepcopy
    localdata = deepcopy(d)
    # Activate cvs-specific overrides in a private copy of the metadata.
    bb.data.setVar("OVERRIDES", "cvs:%s" % bb.data.getVar("OVERRIDES", localdata), localdata)
    bb.data.update_data(localdata)
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        else:
            module = parm["module"]
        dlfile = self.localpath(loc, localdata)
        dldir = bb.data.getVar("DL_DIR", localdata, 1)
        # if local path contains the cvs
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)
        # setup cvs options
        options = []
        if "tag" in parm:
            tag = parm["tag"]
        else:
            tag = ""
        if "date" in parm:
            date = parm["date"]
        else:
            if not tag:
                date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1)
            else:
                date = ""
        if "method" in parm:
            method = parm["method"]
        else:
            method = "pserver"
        if "localdir" in parm:
            localdir = parm["localdir"]
        else:
            localdir = module
        cvs_rsh = None
        if method == "ext":
            if "rsh" in parm:
                cvs_rsh = parm["rsh"]
        tarfn = bb.data.expand("%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, tag, date), localdata)
        bb.data.setVar("TARFILES", dlfile, localdata)
        bb.data.setVar("TARFN", tarfn, localdata)
        dl = os.path.join(dldir, tarfn)
        # Skip work entirely when the tarball is already present.
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
            continue
        # Try the (optionally per-package) tarball stash before doing a
        # real checkout.
        pn = bb.data.getVar("PN", d, 1)
        cvs_tarball_stash = None
        if pn:
            cvs_tarball_stash = bb.data.getVar("CVS_TARBALL_STASH_%s" % pn, d, 1)
        if cvs_tarball_stash == None:
            cvs_tarball_stash = bb.data.getVar("CVS_TARBALL_STASH", d, 1)
        if cvs_tarball_stash:
            fetchcmd = bb.data.getVar("FETCHCOMMAND_wget", d, 1)
            uri = cvs_tarball_stash + tarfn
            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
                continue
        if date:
            options.append("-D %s" % date)
        if tag:
            options.append("-r %s" % tag)
        olddir = os.path.abspath(os.getcwd())
        os.chdir(bb.data.expand(dldir, localdata))
        # setup cvsroot
        if method == "dir":
            cvsroot = path
        else:
            cvsroot = ":" + method + ":" + user
            if pswd:
                cvsroot += ":" + pswd
            cvsroot += "@" + host + ":" + path
        bb.data.setVar("CVSROOT", cvsroot, localdata)
        bb.data.setVar("CVSCOOPTS", " ".join(options), localdata)
        bb.data.setVar("CVSMODULE", module, localdata)
        cvscmd = bb.data.getVar("FETCHCOMMAND", localdata, 1)
        cvsupdatecmd = bb.data.getVar("UPDATECOMMAND", localdata, 1)
        if cvs_rsh:
            cvscmd = 'CVS_RSH="%s" %s' % (cvs_rsh, cvscmd)
            cvsupdatecmd = 'CVS_RSH="%s" %s' % (cvs_rsh, cvsupdatecmd)
        # create module directory
        bb.debug(2, "Fetch: checking for module directory")
        pkg = bb.data.expand("${PN}", d)
        pkgdir = os.path.join(bb.data.expand("${CVSDIR}", localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, "CVS"), os.R_OK):
            bb.note("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.note("Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.debug(1, "Running %s" % cvscmd)
            myret = os.system(cvscmd)
        if myret != 0:
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(module)
        os.chdir(moddir)
        os.chdir("..")
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(moddir)))
        if myret != 0:
            # NOTE(review): unlink uses the bare tarfn while cwd is the
            # module's parent, but tar wrote to dldir — confirm the
            # intended file is removed on failure.
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, d, urls = []):
    """Fetch urls"""
    # Downloads each URL with the configured wget-style command, trying
    # PREMIRRORS first, then the original URI, then MIRRORS.  A file is
    # considered complete once its .md5 stamp exists.
    def md5_sum(parm, d):
        """
        Return the MD5SUM associated with the to be downloaded
        file.
        It can return None if no md5sum is associated
        """
        try:
            return parm['md5sum']
        except:
            return None

    def verify_md5sum(wanted_sum, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        if not wanted_sum:
            return True
        return wanted_sum == got_sum

    def fetch_uri(uri, basename, dl, md5, parm, d):
        # Run FETCHCOMMAND (or RESUMECOMMAND for a partial file) and,
        # on success, record the md5 stamp.  Returns True on success.
        # the MD5 sum we want to verify
        wanted_md5sum = md5_sum(parm, d)
        if os.path.exists(dl):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
        bb.note("fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri)
        fetchcmd = fetchcmd.replace("${FILE}", basename)
        bb.debug(2, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False
        # check if sourceforge did send us to the mirror page
        # NOTE(review): dl_dir is fetched but never used here.
        dl_dir = data.getVar("DL_DIR", d, True)
        if not os.path.exists(dl):
            # FIXME shell quote it
            os.system("rm %s*" % dl)
            bb.debug(2, "sourceforge.net send us to the mirror on %s" % basename)
            return False
        # supposedly complete.. write out md5sum
        if bb.which(data.getVar('PATH', d), 'md5sum'):
            try:
                md5pipe = os.popen('md5sum ' + dl)
                md5data = (md5pipe.readline().split() or [ "" ])[0]
                md5pipe.close()
            except OSError:
                md5data = ""
            # verify the md5sum
            if not verify_md5sum(wanted_md5sum, md5data):
                raise MD5SumError(uri)
            md5out = file(md5, 'w')
            md5out.write(md5data)
            md5out.close()
        return True

    if not urls:
        urls = self.urls
    # Activate wget-specific overrides in a private metadata copy.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)
    for uri in urls:
        completed = 0
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
        basename = os.path.basename(path)
        dl = self.localpath(uri, d)
        dl = data.expand(dl, localdata)
        md5 = dl + '.md5'
        if os.path.exists(md5):
            # complete, nothing to see here..
            continue
        # PREMIRRORS/MIRRORS are newline-separated "find replace" pairs.
        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                    completed = 1
                    break
        if completed:
            continue
        if fetch_uri(uri, basename, dl, md5, parm, localdata):
            continue
        # try mirrors
        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                    completed = 1
                    break
        if not completed:
            raise FetchError(uri)
    del localdata
def supports(url, d):
    """Check to see if a given url can be fetched with cvs.
       Expects supplied url in list form, as outputted by bb.decodeurl().
    """
    decoded = bb.decodeurl(data.expand(url, d))
    scheme = decoded[0]
    return scheme in ['cvs', 'pserver']
def supports(url, d):
    """Check to see if a given url can be fetched via bitkeeper.
       Expects supplied url in list form, as outputted by bb.decodeurl().
    """
    decoded = bb.decodeurl(data.expand(url, d))
    scheme = decoded[0]
    return scheme in ["bk"]
def __init__(self, urls=[], d=None):
    """Collect the subset of *urls* this fetcher supports.

    *d* is the metadata store consulted by supports(); it was
    previously read as an undefined free variable (NameError as soon
    as urls was non-empty), so it is now an explicit optional
    parameter, keeping the old call signature valid.
    """
    self.urls = []
    for url in urls:
        # '==' rather than 'is': identity comparison against a literal
        # int is implementation-dependent.
        if self.supports(bb.decodeurl(url), d) == 1:
            self.urls.append(url)
def supports(url, d):
    """Check to see if a given url can be fetched using wget.
       Expects supplied url in list form, as outputted by bb.decodeurl().
    """
    decoded = bb.decodeurl(bb.data.expand(url, d))
    scheme = decoded[0]
    return scheme in ["http", "https", "ftp"]
def go(self, d, urls = []):
    """Fetch urls"""
    # Older rsync-based git fetcher: keeps a mirror under ${GITDIR},
    # refreshes it with git pull + rsync of the .git metadata, then
    # tars up both the repository and the per-tag checkout.
    if not urls:
        urls = self.urls
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))
        tag = gettag(parm)
        # Mangle host+path into a flat name usable as a directory/tarball.
        gitsrcname = '%s%s' % (host, path.replace('/', '.'))
        repofile = os.path.join(data.getVar("DL_DIR", d, 1), 'git_%s.tar.gz' % (gitsrcname))
        repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
        coname = '%s' % (tag)
        codir = os.path.join(repodir, coname)
        cofile = self.localpath(loc, d)
        # Always update to current if tag=="master"
        #if os.access(cofile, os.R_OK) and (tag != "master"):
        if os.access(cofile, os.R_OK):
            bb.debug(1, "%s already exists, skipping git checkout." % cofile)
            continue
        # Still Need to add GIT_TARBALL_STASH Support...
        # pn = data.getVar('PN', d, 1)
        # cvs_tarball_stash = None
        # if pn:
        #     cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH_%s' % pn, d, 1)
        # if cvs_tarball_stash == None:
        #     cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH', d, 1)
        # if cvs_tarball_stash:
        #     fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1)
        #     uri = cvs_tarball_stash + tarfn
        #     bb.note("fetch " + uri)
        #     fetchcmd = fetchcmd.replace("${URI}", uri)
        #     ret = os.system(fetchcmd)
        #     if ret == 0:
        #         bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
        #         continue
        #if os.path.exists(repodir):
        #prunedir(repodir)
        bb.mkdirhier(repodir)
        os.chdir(repodir)
        #print("Changing to %s" % repodir)
        # Seed from the repository tarball when present, else clone.
        if os.access(repofile, os.R_OK):
            rungitcmd("tar -xzf %s" % (repofile),d)
        else:
            rungitcmd("git clone rsync://%s%s %s" % (host, path, repodir),d)
            # Pull the .git metadata (tags included) via rsync.
            rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)
        #print("Changing to %s" % repodir)
        os.chdir(repodir)
        rungitcmd("git pull rsync://%s%s" % (host, path),d)
        #print("Changing to %s" % repodir)
        os.chdir(repodir)
        # Snapshot the refreshed repository into DL_DIR.
        rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)
        # Re-create the per-tag checkout directory from a clean slate.
        if os.path.exists(codir):
            prunedir(codir)
        #print("Changing to %s" % repodir)
        bb.mkdirhier(codir)
        os.chdir(repodir)
        rungitcmd("git read-tree %s" % (tag),d)
        rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)
        #print("Changing to %s" % codir)
        os.chdir(codir)
        rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
def go(self, d, urls = []):
    """Fetch urls"""
    # Checks out each svn URL into a temporary directory (or skips via
    # the tarball stash) and leaves a tarball of the module in DL_DIR.
    # Uses chdir + os.system, so not safe to run concurrently.
    if not urls:
        urls = self.urls
    # Activate svn-specific overrides in a private metadata copy.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svn method needs a 'module' parameter")
        else:
            module = parm["module"]
        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)
        # if local path contains the svn
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)
        # setup svn options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""
        date = Fetch.getSRCDate(d)
        if "proto" in parm:
            proto = parm["proto"]
        else:
            proto = "svn"
        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in parm:
            svn_rsh = parm["rsh"]
        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)
        # try to use the tarball stash
        if Fetch.check_for_tarball(d, tarfn, dldir, date):
            bb.debug(1, "%s already exists or was mirrored, skipping svn checkout." % tarfn)
            continue
        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))
        svnroot = host + path
        data.setVar('SVNROOT', svnroot, localdata)
        data.setVar('SVNCOOPTS', " ".join(options), localdata)
        data.setVar('SVNMODULE', module, localdata)
        svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
        # Default: checkout pinned to the SRCDATE via a {date} revision.
        svncmd = "svn co -r {%s} %s://%s/%s" % (date, proto, svnroot, module)
        # either use the revision or if SRCDATE is now no braces
        if revision:
            svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
        elif date == "now":
            svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)
        if svn_rsh:
            # NOTE(review): the environment variable is spelled
            # lowercase "svn_RSH" here — svn expects SVN_SSH/SVN_RSH;
            # confirm whether this ever worked as intended.
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
        # create temp directory
        bb.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)
        # check out sources there
        os.chdir(tmpfile)
        bb.note("Fetch " + loc)
        bb.debug(1, "Running %s" % svncmd)
        myret = os.system(svncmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(module)
        os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
        if myret != 0:
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        # cleanup
        os.system('rm -rf %s' % tmpfile)
        os.chdir(olddir)
    del localdata
def go(self, d=bb.data.init(), urls=[]):
    """Fetch urls"""
    # Checks out each svn URL into a temporary directory (or fetches it
    # from the tarball stash) and leaves a tarball of the module in
    # DL_DIR.  Uses chdir + os.system, so not safe to run concurrently.
    # NOTE(review): the default d=bb.data.init() is evaluated once at
    # definition time; confirm callers always pass d explicitly.
    if not urls:
        urls = self.urls
    from copy import deepcopy
    localdata = deepcopy(d)
    # Activate svn-specific overrides in a private metadata copy.
    bb.data.setVar("OVERRIDES", "svn:%s" % bb.data.getVar("OVERRIDES", localdata), localdata)
    bb.data.update_data(localdata)
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svn method needs a 'module' parameter")
        else:
            module = parm["module"]
        dlfile = self.localpath(loc, localdata)
        dldir = bb.data.getVar("DL_DIR", localdata, 1)
        # if local path contains the svn
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)
        # setup svn options
        options = []
        if "rev" in parm:
            revision = parm["rev"]
        else:
            revision = ""
        date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1)
        if "method" in parm:
            method = parm["method"]
        else:
            method = "pserver"
        if "proto" in parm:
            proto = parm["proto"]
        else:
            proto = "svn"
        svn_rsh = None
        if method == "ext":
            if "rsh" in parm:
                svn_rsh = parm["rsh"]
        tarfn = bb.data.expand("%s_%s_%s_%s.tar.gz" % (module.replace("/", "."), host, revision, date), localdata)
        bb.data.setVar("TARFILES", dlfile, localdata)
        bb.data.setVar("TARFN", tarfn, localdata)
        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping svn checkout." % tarfn)
            continue
        # Try the tarball stash before a real checkout.
        svn_tarball_stash = bb.data.getVar("CVS_TARBALL_STASH", d, 1)
        if svn_tarball_stash:
            fetchcmd = bb.data.getVar("FETCHCOMMAND_wget", d, 1)
            uri = svn_tarball_stash + tarfn
            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
                continue
        olddir = os.path.abspath(os.getcwd())
        os.chdir(bb.data.expand(dldir, localdata))
        # setup svnroot
        # svnroot = ":" + method + ":" + user
        # if pswd:
        #     svnroot += ":" + pswd
        svnroot = host + path
        bb.data.setVar("SVNROOT", svnroot, localdata)
        bb.data.setVar("SVNCOOPTS", " ".join(options), localdata)
        bb.data.setVar("SVNMODULE", module, localdata)
        svncmd = bb.data.getVar("FETCHCOMMAND", localdata, 1)
        svncmd = "svn co %s://%s/%s" % (proto, svnroot, module)
        if revision:
            # Argument order fixed: the revision goes with -r and the
            # protocol prefixes the URL.  The previous code had them
            # swapped ("svn co -r svn rev://..."), producing a garbage
            # command whenever a revision was pinned.
            svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
        if svn_rsh:
            # NOTE(review): lowercase "svn_RSH" — svn expects
            # SVN_SSH/SVN_RSH; confirm whether this ever worked.
            svncmd = 'svn_RSH="%s" %s' % (svn_rsh, svncmd)
        # create temp directory
        bb.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(bb.data.expand("${WORKDIR}", localdata))
        bb.data.setVar("TMPBASE", bb.data.expand("${WORKDIR}/oesvn.XXXXXX", localdata), localdata)
        tmppipe = os.popen(bb.data.getVar("MKTEMPDIRCMD", localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)
        # check out sources there
        os.chdir(tmpfile)
        bb.note("Fetch " + loc)
        bb.debug(1, "Running %s" % svncmd)
        myret = os.system(svncmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(module)
        os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir, tarfn), os.path.basename(module)))
        if myret != 0:
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        # cleanup
        os.system("rm -rf %s" % tmpfile)
        os.chdir(olddir)
    del localdata
def go(self, d, urls = []):
    """Fetch urls"""
    # Checks out each svk URL into a temporary directory and leaves a
    # tarball of the module in DL_DIR.  Uses chdir + os.system, so not
    # safe to run concurrently.
    if not urls:
        urls = self.urls
    # Activate svk-specific overrides in a private metadata copy.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svk:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)
    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        else:
            module = parm["module"]
        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)
        # setup svk options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""
        date = Fetch.getSRCDate(d)
        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)
        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping svk checkout." % tarfn)
            continue
        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))
        svkroot = host + path
        data.setVar('SVKROOT', svkroot, localdata)
        data.setVar('SVKCOOPTS', " ".join(options), localdata)
        data.setVar('SVKMODULE', module, localdata)
        # Default: checkout pinned to the SRCDATE via a {date} revision.
        svkcmd = "svk co -r {%s} %s/%s" % (date, svkroot, module)
        if revision:
            # Format string fixed: the old "svk co -r %s/%s" had two
            # placeholders for three arguments (revision, svkroot,
            # module) and raised TypeError whenever a revision was set;
            # it also omitted the space between the revision and the
            # depot path.
            svkcmd = "svk co -r %s %s/%s" % (revision, svkroot, module)
        # create temp directory
        bb.debug(2, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)
        # check out sources there
        os.chdir(tmpfile)
        bb.note("Fetch " + loc)
        bb.debug(1, "Running %s" % svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(module)
        os.chdir(os.path.join(tmpfile, os.path.dirname(module)))
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
        if myret != 0:
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        # cleanup
        os.system('rm -rf %s' % tmpfile)
        os.chdir(olddir)
    del localdata
def go(self, d, urls = []):
    """Fetch urls via cvs.

    For each url: checks out (or updates) parm["module"] under
    ${CVSDIR}/${PN}, then tars the module into DL_DIR as a stamped
    tarball.  Honours tag/date/method/localdir/rsh url parameters and
    skips urls whose tarball already exists or can be mirrored.

    Raises MissingParameterError if a url lacks a 'module' parameter,
    and FetchError if the checkout/update fails.
    """
    if not urls:
        urls = self.urls
    # Work on a copy of the datastore with the 'cvs' override active.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)
        # if local path contains the cvs
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)

        # setup cvs options
        options = []
        if 'tag' in parm:
            tag = parm['tag']
        else:
            tag = ""

        # A tag pins the checkout; only fall back to SRCDATE when
        # neither an explicit date nor a tag was given.
        if 'date' in parm:
            date = parm['date']
        else:
            if not tag:
                date = Fetch.getSRCDate(d)
            else:
                date = ""

        if "method" in parm:
            method = parm["method"]
        else:
            method = "pserver"

        if "localdir" in parm:
            localdir = parm["localdir"]
        else:
            localdir = module

        # CVS_RSH is only meaningful for the :ext: access method.
        cvs_rsh = None
        if method == "ext":
            if "rsh" in parm:
                cvs_rsh = parm["rsh"]

        # Tarball name encodes module/host/tag/date so a change in any
        # of them forces a fresh fetch.
        tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        dl = os.path.join(dldir, tarfn)
        if os.access(dl, os.R_OK):
            bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn)
            continue

        # try to use the tarball stash
        if Fetch.try_mirror(d, tarfn):
            continue

        if date:
            options.append("-D %s" % date)
        if tag:
            options.append("-r %s" % tag)

        # Remember cwd; the rest of the loop body chdirs around.
        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        # setup cvsroot
        if method == "dir":
            cvsroot = path
        else:
            cvsroot = ":" + method + ":" + user
            if pswd:
                cvsroot += ":" + pswd
            cvsroot += "@" + host + ":" + path

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        bb.debug(2, "Fetch: checking for module directory")
        pkg=data.expand('${PN}', d)
        pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir=os.path.join(pkgdir,localdir)
        # An existing CVS/ subdir means a prior checkout: update in
        # place instead of re-checking out.
        if os.access(os.path.join(moddir,'CVS'), os.R_OK):
            bb.note("Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.note("Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.debug(1, "Running %s" % cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(module)

        # Tar from the parent so the archive contains the module dir
        # itself rather than its contents.
        os.chdir(moddir)
        os.chdir('..')
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir)))
        if myret != 0:
            # NOTE(review): tarfn here is relative to the module's
            # parent dir, not DL_DIR where tar wrote — likely removes
            # nothing; no FetchError is raised on tar failure. Confirm
            # intent.
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, d, urls = []):
    """Fetch urls via subversion.

    For each url: checks out (or updates) parm["module"] under
    ${SVNDIR}/${PN}, pinned to parm["rev"] if given (else to SRCDATE
    unless it is "now"), then tars the module into DL_DIR as a stamped
    tarball.  Skips urls whose tarball already exists or was mirrored.

    Raises MissingParameterError if a url lacks a 'module' parameter,
    and FetchError if the checkout/update fails.
    """
    if not urls:
        urls = self.urls
    # Work on a copy of the datastore with the 'svn' override active.
    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for loc in urls:
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata))
        if not "module" in parm:
            raise MissingParameterError("svn method needs a 'module' parameter")
        else:
            module = parm["module"]

        dlfile = self.localpath(loc, localdata)
        dldir = data.getVar('DL_DIR', localdata, 1)
        # if local path contains the svn
        # module, consider the dir above it to be the
        # download directory
        # pos = dlfile.find(module)
        # if pos:
        #     dldir = dlfile[:pos]
        # else:
        #     dldir = os.path.dirname(dlfile)

        # setup svn options
        options = []
        if 'rev' in parm:
            revision = parm['rev']
        else:
            revision = ""

        date = Fetch.getSRCDate(d)

        if "proto" in parm:
            proto = parm["proto"]
        else:
            proto = "svn"

        # An rsh wrapper only applies to the svn+ssh transport.
        svn_rsh = None
        if proto == "svn+ssh" and "rsh" in parm:
            svn_rsh = parm["rsh"]

        # Tarball name encodes module/host/path/revision/date so a
        # change in any of them forces a fresh fetch.
        tarfn = data.expand('%s_%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, path.replace('/', '.'), revision, date), localdata)
        data.setVar('TARFILES', dlfile, localdata)
        data.setVar('TARFN', tarfn, localdata)

        # try to use the tarball stash
        if Fetch.check_for_tarball(d, tarfn, dldir, date):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % tarfn)
            continue

        # Remember cwd; the rest of the loop body chdirs around.
        olddir = os.path.abspath(os.getcwd())
        os.chdir(data.expand(dldir, localdata))

        svnroot = host + path

        # either use the revision, or SRCDATE in braces, or nothing for SRCDATE = "now"
        if revision:
            options.append("-r %s" % revision)
        elif date != "now":
            options.append("-r {%s}" % date)

        data.setVar('SVNROOT', "%s://%s/%s" % (proto, svnroot, module), localdata)
        data.setVar('SVNCOOPTS', " ".join(options), localdata)
        data.setVar('SVNMODULE', module, localdata)
        svncmd = data.getVar('FETCHCOMMAND', localdata, 1)
        svnupcmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if svn_rsh:
            # NOTE(review): env var is spelled "svn_RSH" here; svn
            # itself honours SVN_SSH for svn+ssh — confirm whether this
            # name is deliberate (e.g. consumed by FETCHCOMMAND).
            svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
            svnupcmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svnupcmd)

        pkg=data.expand('${PN}', d)
        pkgdir=os.path.join(data.expand('${SVNDIR}', localdata), pkg)
        moddir=os.path.join(pkgdir, module)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + moddir + "'")

        # An existing .svn subdir means a prior checkout: update in
        # place instead of re-checking out.
        if os.access(os.path.join(moddir,'.svn'), os.R_OK):
            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
            # update sources there
            os.chdir(moddir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupcmd)
            myret = os.system(svnupcmd)
        else:
            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svncmd)
            myret = os.system(svncmd)

        if myret != 0:
            raise FetchError(module)

        # Tar from pkgdir so the archive contains the module dir itself.
        os.chdir(pkgdir)
        # tar them up to a defined filename
        myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module)))
        if myret != 0:
            # NOTE(review): tarfn is relative to pkgdir, not DL_DIR
            # where tar wrote — likely removes nothing; no FetchError
            # is raised on tar failure. Confirm intent.
            try:
                os.unlink(tarfn)
            except OSError:
                pass
        os.chdir(olddir)
    del localdata
def go(self, d=bb.data.init(), urls=[]):
    """Fetch urls via wget.

    For each url: tries PREMIRRORS first, then the primary location,
    then MIRRORS, stopping at the first successful download.  A
    successful fetch writes an md5 stamp file next to the download so
    later runs can skip it.

    Raises FetchError when every location fails for a url.

    Note: both defaults are evaluated once at definition time (the
    classic mutable-default pitfall); kept as-is for backward
    compatibility with existing callers.
    """

    def fetch_uri(uri, basename, dl, md5, d):
        # Run RESUMECOMMAND when a partial download exists, otherwise
        # FETCHCOMMAND.  Returns True on success and stamps the md5.
        if os.path.exists(dl):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = bb.data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = bb.data.getVar("FETCHCOMMAND", d, 1)
        bb.note("fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri)
        fetchcmd = fetchcmd.replace("${FILE}", basename)
        bb.debug(2, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # supposedly complete.. write out md5sum
        if bb.which(bb.data.getVar("PATH", d), "md5sum"):
            try:
                md5pipe = os.popen("md5sum " + dl)
                md5data = (md5pipe.readline().split() or [""])[0]
                md5pipe.close()
            except OSError:
                md5data = ""
            md5out = file(md5, "w")
            md5out.write(md5data)
            md5out.close()
        else:
            # No md5sum tool: write an empty stamp so the completion
            # check above still works on the next run.
            md5out = file(md5, "w")
            md5out.write("")
            md5out.close()
        return True

    if not urls:
        urls = self.urls

    # Work on a copy of the datastore with the 'wget' override active.
    from copy import deepcopy
    localdata = deepcopy(d)
    bb.data.setVar("OVERRIDES", "wget:" + bb.data.getVar("OVERRIDES", localdata), localdata)
    bb.data.update_data(localdata)

    for uri in urls:
        completed = 0
        (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(uri, localdata))
        basename = os.path.basename(path)
        dl = self.localpath(uri, d)
        dl = bb.data.expand(dl, localdata)
        md5 = dl + ".md5"

        if os.path.exists(md5):
            # complete, nothing to see here..
            continue

        premirrors = [i.split() for i in (bb.data.getVar("PREMIRRORS", localdata, 1) or "").split("\n") if i]
        for (find, replace) in premirrors:
            # BUGFIX: uri_replace takes four arguments
            # (uri, uri_find, uri_replace, d); the datastore was
            # previously omitted, raising TypeError whenever
            # PREMIRRORS was set.
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, localdata):
                    completed = 1
                    break

        if completed:
            continue

        if fetch_uri(uri, basename, dl, md5, localdata):
            continue

        # try mirrors
        mirrors = [i.split() for i in (bb.data.getVar("MIRRORS", localdata, 1) or "").split("\n") if i]
        for (find, replace) in mirrors:
            # BUGFIX: pass d (see PREMIRRORS loop above).
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, localdata):
                    completed = 1
                    break

        if not completed:
            raise FetchError(uri)

    del localdata
def supports(url, d): """Check to see if a given url can be fetched in the local filesystem. Expects supplied url in list form, as outputted by bb.decodeurl(). """ (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) return type in ["file", "patch"]