from twisted.internet import defer, reactor

# http, defer_utils, and defer_work_queue are helper modules from the
# surrounding codebase and are assumed to be importable here.


def downloadDeferWorkQueue(urls):
    """
    Uses a DeferWorkQueue to download URLs with a dynamic number of
    concurrent downloads; here we will download 10 concurrently.

    NOTE: In this case we aren't handling failures like in the others.
    """
    ret = {}

    def _setUrl(url, content):
        ret[url] = content

    getPage = lambda url: http.getPage(url,
                                       connectionTimeout=30,
                                       timeout=30).addCallback(
                                           lambda content: _setUrl(url, content))

    dwq = defer_work_queue.DeferWorkQueue(10)
    for url in urls:
        dwq.add(getPage, url)

    return defer_work_queue.waitForCompletion(dwq).addCallback(lambda _: ret)
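# Any of these download functions can be driven from a standalone script.
# A minimal sketch, assuming the URL list and the printing callback are
# placeholders rather than part of the original code:
import sys


def main():
    d = downloadDeferWorkQueue(['http://example.com/', 'http://example.org/'])
    d.addCallback(lambda results: sys.stdout.write('%r\n' % (results,)))
    # Stop the reactor whether the download succeeded or failed
    d.addBoth(lambda _: reactor.stop())

if __name__ == '__main__':
    reactor.callWhenRunning(main)
    reactor.run()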
def downloadMapSerial(urls):
    """
    Uses mapSerial to download all URLs in serial.
    """
    getPage = lambda url: http.getPage(url,
                                       connectionTimeout=30,
                                       timeout=30).addCallback(
                                           lambda content: (url, content))
    d = defer_utils.mapSerial(getPage, urls)
    d.addCallback(dict)
    return d
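# mapSerial applies a Deferred-returning function to each element in turn,
# waiting for each result before starting the next, and fires with the list
# of results in order. A minimal sketch of the idea (not necessarily
# defer_utils' actual implementation):
@defer.inlineCallbacks
def mapSerialSketch(f, iterable):
    # Wait for each Deferred before making the next call,
    # collecting the results in order.
    results = []
    for x in iterable:
        r = yield f(x)
        results.append(r)
    defer.returnValue(results)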
@defer.inlineCallbacks
def downloadDeferredSerialInline(urls):
    """
    Uses inline callbacks to download URLs in serial.

    Sequential-looking code, FTW.
    """
    ret = {}
    for url in urls:
        content = yield http.getPage(url, connectionTimeout=30, timeout=30)
        ret[url] = content
    defer.returnValue(ret)
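# One nice property of inlineCallbacks is that a failed download can be
# handled with an ordinary try/except around the yield. A hypothetical
# variant (the name and the skip-on-failure policy are assumptions, not
# part of the original) that skips failed URLs instead of aborting:
@defer.inlineCallbacks
def downloadDeferredSerialInlineSafe(urls):
    ret = {}
    for url in urls:
        try:
            content = yield http.getPage(url, connectionTimeout=30, timeout=30)
        except Exception:
            # Skip this URL and carry on with the rest
            continue
        ret[url] = content
    defer.returnValue(ret)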
def downloadDeferredSerial(urls):
    """
    Downloads URLs in serial by chaining callbacks by hand: each
    completed download schedules the next one via the reactor.
    """
    ret = {}
    d = defer.Deferred()

    def _downloadUrl(urlIter):
        try:
            url = urlIter.next()
            downloadDeferred = http.getPage(url, connectionTimeout=30, timeout=30)

            def _downloaded(contents):
                ret[url] = contents
                reactor.callLater(0.0, _downloadUrl, urlIter)

            downloadDeferred.addCallback(_downloaded)

            def _error(f):
                # Something went wrong, fail the whole download
                d.errback(f)

            downloadDeferred.addErrback(_error)
        except StopIteration:
            d.callback(ret)

    _downloadUrl(iter(urls))
    return d
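# The reactor.callLater(0.0, ...) trampoline above is deliberate: scheduling
# the next download through the reactor instead of calling _downloadUrl
# directly keeps the call stack flat, so a long URL list cannot recurse
# arbitrarily deep, and it lets the reactor service other events between
# downloads.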
def _download(url):
    return http.getPage(url,
                        connectionTimeout=30,
                        timeout=30).addCallback(lambda content: (url, content))
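# A helper shaped like _download composes naturally with Twisted's
# defer.gatherResults to run every download at once. A minimal sketch
# (the function name is an assumption, not part of the original):
def downloadGather(urls):
    # Fire off all downloads immediately; gatherResults fires with the
    # list of (url, content) pairs, which dict() turns into a mapping.
    d = defer.gatherResults([_download(url) for url in urls])
    d.addCallback(dict)
    return d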