def go(self, uri, ud, d):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        # Resume a partial download if one is already on disk,
        # otherwise start a fresh fetch.
        if os.path.exists(ud.localpath):
            cmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            cmd = data.getVar("FETCHCOMMAND", d, 1)

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        cmd = cmd.replace("${URI}", uri).replace("${FILE}", ud.basename)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + cmd)
        if os.system(cmd) != 0:
            return False

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
            return False

        return True

    def mirror_entries(varname):
        # Parse a newline-separated "<find> <replace>" mirror variable
        # into a list of token pairs.
        raw = data.getVar(varname, localdata, 1) or ""
        return [entry.split() for entry in raw.split('\n') if entry]

    def try_mirrors(varname):
        # Attempt each rewritten uri in order; True as soon as one works.
        for (find, replace) in mirror_entries(varname):
            candidate = uri_replace(uri, find, replace, d)
            if candidate != uri and fetch_uri(candidate, ud, localdata):
                return True
        return False

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Order of attempts: premirrors, then the original uri, then mirrors.
    if try_mirrors('PREMIRRORS'):
        return
    if fetch_uri(uri, ud, localdata):
        return
    # try mirrors
    if try_mirrors('MIRRORS'):
        return

    raise FetchError(uri)
def go(self, uri, ud, d):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        """Run the RESUME/FETCH command for a single uri.

        Returns True when ud.localpath exists afterwards, False when the
        command failed or produced no file (e.g. a mirror redirect page).
        """
        if os.path.exists(ud.localpath):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri)
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # check if sourceforge did send us to the mirror page
        if not os.path.exists(ud.localpath):
            # Clean up anything the redirect left behind.  The old code
            # ran os.system("rm %s*") through a shell with an unquoted
            # path (its own FIXME); matching the filename prefix ourselves
            # avoids the shell and the quoting problem entirely.
            dldir = os.path.dirname(ud.localpath) or "."
            prefix = os.path.basename(ud.localpath)
            try:
                for entry in os.listdir(dldir):
                    if entry.startswith(prefix):
                        os.unlink(os.path.join(dldir, entry))
            except OSError:
                # best effort, just as the old shell "rm" was
                pass
            bb.msg.debug(2, bb.msg.domain.Fetcher, "sourceforge.net send us to the mirror on %s" % ud.basename)
            return False

        return True

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Order of attempts: premirrors, then the original uri, then mirrors.
    premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in premirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return

    if fetch_uri(uri, ud, localdata):
        return

    # try mirrors
    mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return

    raise FetchError(uri)
def go(self, d, urls = None):
    """Fetch urls, verifying any md5sum given in the url parameters.

    urls defaults to self.urls (the old mutable default argument
    "urls = []" has been replaced with None).  Raises MD5SumError on a
    checksum mismatch and FetchError when a uri cannot be fetched from
    any premirror, the upstream site, or any mirror.
    """

    def md5_sum(parm, d):
        """
        Return the MD5SUM associated with the to be downloaded
        file.
        It can return None if no md5sum is associated
        """
        # Only a missing key means "no checksum"; the previous bare
        # "except:" hid every other error as well.
        try:
            return parm['md5sum']
        except KeyError:
            return None

    def verify_md5sum(wanted_sum, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        if not wanted_sum:
            return True
        return wanted_sum == got_sum

    def fetch_uri(uri, basename, dl, md5, parm, d):
        # the MD5 sum we want to verify
        wanted_md5sum = md5_sum(parm, d)
        if os.path.exists(dl):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

        bb.note("fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri)
        fetchcmd = fetchcmd.replace("${FILE}", basename)
        bb.debug(2, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # check if sourceforge did send us to the mirror page
        if not os.path.exists(dl):
            # Remove leftovers without going through a shell; the old
            # os.system("rm %s*") left the path unquoted (its own FIXME).
            dldir = os.path.dirname(dl) or "."
            prefix = os.path.basename(dl)
            try:
                for entry in os.listdir(dldir):
                    if entry.startswith(prefix):
                        os.unlink(os.path.join(dldir, entry))
            except OSError:
                # best effort, just as the old shell "rm" was
                pass
            bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename)
            return False

        # supposedly complete.. write out md5sum
        # md5data must be defined even when no md5sum tool is on PATH;
        # previously verify_md5sum below raised a NameError in that case.
        md5data = ""
        if bb.which(data.getVar('PATH', d), 'md5sum'):
            try:
                md5pipe = os.popen('md5sum ' + dl)
                md5data = (md5pipe.readline().split() or [ "" ])[0]
                md5pipe.close()
            except OSError:
                md5data = ""

        # verify the md5sum
        if not verify_md5sum(wanted_md5sum, md5data):
            raise MD5SumError(uri)

        # open() instead of the removed py2-only file() builtin
        md5out = open(md5, 'w')
        md5out.write(md5data)
        md5out.close()
        return True

    if not urls:
        urls = self.urls

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    for uri in urls:
        completed = 0
        (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata))
        basename = os.path.basename(path)
        dl = self.localpath(uri, d)
        dl = data.expand(dl, localdata)
        # a stamp file next to the download marks it complete
        md5 = dl + '.md5'

        if os.path.exists(md5):
            # complete, nothing to see here..
            continue

        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                    completed = 1
                    break

        if completed:
            continue

        if fetch_uri(uri, basename, dl, md5, parm, localdata):
            continue

        # try mirrors
        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, basename, dl, md5, parm, localdata):
                    completed = 1
                    break

        if not completed:
            raise FetchError(uri)

    del localdata
def go(self, uri, ud, d, checkonly=False):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        # Pick the command: a plain availability check, a resume of a
        # partial download, or a fresh fetch.
        if checkonly:
            cmd = data.getVar("CHECKCOMMAND", d, 1)
        elif os.path.exists(ud.localpath):
            cmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            cmd = data.getVar("FETCHCOMMAND", d, 1)

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        cmd = cmd.replace("${URI}", uri).replace("${FILE}", ud.basename)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + cmd)
        if os.system(cmd) != 0:
            return False

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
            return False

        return True

    def rewritten(varname):
        # Expand a "<find> <replace>" mirror variable and yield each
        # rewritten uri that actually differs from the original.
        raw = data.getVar(varname, localdata, 1) or ""
        pairs = [entry.split() for entry in raw.split('\n') if entry]
        for (find, replace) in pairs:
            candidate = uri_replace(uri, find, replace, d)
            if candidate != uri:
                yield candidate

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Order of attempts: premirrors, then the original uri, then mirrors.
    for candidate in rewritten('PREMIRRORS'):
        if fetch_uri(candidate, ud, localdata):
            return True

    if fetch_uri(uri, ud, localdata):
        return True

    # try mirrors
    for candidate in rewritten('MIRRORS'):
        if fetch_uri(candidate, ud, localdata):
            return True

    raise FetchError(uri)
def go(self, uri, ud, d, checkonly = False):
    """Fetch urls"""

    def fetch_uri(uri, ud, d):
        """Run the check/resume/fetch command for one uri, prefixing the
        http/ftp proxy environment unless the host is on an ignore list.

        Returns True when the command succeeded and ud.localpath exists.
        """
        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
        elif os.path.exists(ud.localpath):
            # file exists, but we didnt complete it.. trying again..
            fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
        else:
            fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

        # strip any ";param" suffix before handing the uri to the tool
        uri = uri.split(";")[0]
        uri_decoded = list(bb.decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)

        def proxy_for(varname, ignore_varname):
            # Look up a proxy variable, dropping it when uri_host
            # matches an entry of the corresponding ignore list.
            proxy = data.getVar(varname, d, True)
            ignores = (data.getVar(ignore_varname, d, True) or "").split()
            for p in ignores:
                if uri_host.endswith(p):
                    return None
            return proxy

        httpproxy = None
        ftpproxy = None
        if uri_type == 'http':
            httpproxy = proxy_for("HTTP_PROXY", "HTTP_PROXY_IGNORE")
        if uri_type == 'ftp':
            # BUGFIX: the ftp ignore list used to read HTTP_PROXY_IGNORE
            # here -- an apparent copy/paste slip from the http branch.
            ftpproxy = proxy_for("FTP_PROXY", "FTP_PROXY_IGNORE")
        if httpproxy:
            fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
        if ftpproxy:
            fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd

        bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
        ret = os.system(fetchcmd)
        if ret != 0:
            return False

        # Sanity check since wget can pretend it succeed when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath):
            bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
            return False

        return True

    localdata = data.createCopy(d)
    data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
    data.update_data(localdata)

    # Order of attempts: premirrors, then the original uri, then mirrors.
    premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in premirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    if fetch_uri(uri, ud, localdata):
        return True

    # try mirrors
    mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
    for (find, replace) in mirrors:
        newuri = uri_replace(uri, find, replace, d)
        if newuri != uri:
            if fetch_uri(newuri, ud, localdata):
                return True

    raise FetchError(uri)