Example #1
0
def downloadDeferWorkQueue(urls):
    """
    Downloads urls with a DeferWorkQueue so that at most 10 downloads
    are in flight at any moment.

    Returns a deferred that fires with a dict mapping url -> content.

    NOTE: In this case we aren't handling failures like in the others
    """
    results = {}

    def _fetchAndStore(url):
        # Kick off the download and record the body under its url once done.
        pageDeferred = http.getPage(url, connectionTimeout=30, timeout=30)

        def _store(content):
            results[url] = content

        return pageDeferred.addCallback(_store)

    workQueue = defer_work_queue.DeferWorkQueue(10)
    for url in urls:
        workQueue.add(_fetchAndStore, url)

    completionDeferred = defer_work_queue.waitForCompletion(workQueue)
    return completionDeferred.addCallback(lambda _: results)
Example #2
0
def downloadMapSerial(urls):
    """
    Downloads every url one after another via mapSerial.

    Returns a deferred that fires with a dict of url -> content.
    """
    def _fetch(url):
        pageDeferred = http.getPage(url, connectionTimeout=30, timeout=30)
        # Pair each body with its url so the results can be dict()'d.
        return pageDeferred.addCallback(lambda content: (url, content))

    return defer_utils.mapSerial(_fetch, urls).addCallback(dict)
Example #3
0
def downloadMapSerial(urls):
    """
    Serial download of all urls using mapSerial; the resulting deferred
    fires with a dict mapping each url to its downloaded content.
    """
    def _download(url):
        d = http.getPage(url, connectionTimeout=30, timeout=30)
        d.addCallback(lambda content: (url, content))
        return d

    resultDeferred = defer_utils.mapSerial(_download, urls)
    resultDeferred.addCallback(dict)
    return resultDeferred
Example #4
0
@defer.inlineCallbacks
def downloadDeferredSerialInline(urls):
    """
    Uses inline callbacks to download urls in serial.

    Sequential looking code, FTW

    Returns a deferred that fires with a dict of url -> content.

    NOTE(review): the body uses ``yield`` + ``defer.returnValue`` but the
    visible snippet lacked the ``@defer.inlineCallbacks`` decorator, so
    callers would have received a bare generator instead of a Deferred;
    the decorator is required for this idiom to work.
    """
    ret = {}
    for url in urls:
        # Each download completes before the next one starts.
        content = yield http.getPage(url, connectionTimeout=30, timeout=30)
        ret[url] = content

    defer.returnValue(ret)
Example #5
0
@defer.inlineCallbacks
def downloadDeferredSerialInline(urls):
    """
    Uses inline callbacks to download urls in serial.

    Sequential looking code, FTW

    Returns a deferred that fires with a dict of url -> content.

    NOTE(review): added the missing ``@defer.inlineCallbacks`` decorator —
    without it this generator function never runs and callers get a
    generator object rather than a Deferred.
    """
    ret = {}
    for url in urls:
        # Serial: wait for each page before requesting the next.
        content = yield http.getPage(url,
                                     connectionTimeout=30,
                                     timeout=30)
        ret[url] = content

    defer.returnValue(ret)
Example #6
0
        def _downloadUrl(urlIter):
            # Downloads one url at a time; the chain is continued through
            # reactor.callLater so the call stack never grows.
            try:
                # .next() raises StopIteration once every url is consumed.
                url = urlIter.next()

                def _downloaded(contents):
                    # Record the page, then schedule the next download on a
                    # fresh reactor iteration.
                    ret[url] = contents
                    reactor.callLater(0.0, _downloadUrl, urlIter)

                def _error(f):
                    # A single failure fails the whole batch.
                    d.errback(f)

                http.getPage(url,
                             connectionTimeout=30,
                             timeout=30
                             ).addCallback(_downloaded).addErrback(_error)

            except StopIteration:
                # All urls processed - hand back the accumulated results.
                d.callback(ret)
Example #7
0
        def _downloadUrl(urlIter):
            # Pulls the next url off the iterator and downloads it; each
            # completion re-schedules this function, giving serial downloads
            # without unbounded recursion.
            try:
                url = next(urlIter)
            except StopIteration:
                # Iterator exhausted: fire the enclosing deferred with the
                # accumulated url -> contents mapping.
                d.callback(ret)
                return

            pageDeferred = http.getPage(url,
                                        connectionTimeout=30,
                                        timeout=30)

            def _downloaded(contents):
                ret[url] = contents
                # Continue with the next url on a fresh reactor turn.
                reactor.callLater(0.0, _downloadUrl, urlIter)

            pageDeferred.addCallback(_downloaded)

            def _error(f):
                # Something went wrong, fail the whole run.
                d.errback(f)

            pageDeferred.addErrback(_error)
Example #8
0
def downloadDeferWorkQueue(urls):
    """
    Downloads urls with bounded concurrency (10 in flight at once)
    through a DeferWorkQueue; fires with a dict of url -> content.

    NOTE: In this case we aren't handling failures like in the others
    """
    contentsByUrl = {}

    def _record(url, content):
        contentsByUrl[url] = content

    def _fetch(url):
        d = http.getPage(url, connectionTimeout=30, timeout=30)
        return d.addCallback(lambda content: _record(url, content))

    queue = defer_work_queue.DeferWorkQueue(10)
    for url in urls:
        queue.add(_fetch, url)

    return defer_work_queue.waitForCompletion(queue).addCallback(
        lambda _: contentsByUrl)
Example #9
0
 def _download(url):
     """Downloads one url; fires with the (url, content) pair."""
     pageDeferred = http.getPage(url, connectionTimeout=30, timeout=30)
     return pageDeferred.addCallback(lambda content: (url, content))
Example #10
0
 def _download(url):
     # Fetch the page and pair it with its url for later assembly.
     d = http.getPage(url,
                      connectionTimeout=30,
                      timeout=30)
     d.addCallback(lambda content: (url, content))
     return d