Example #1
import time
from typing import Tuple

from tornado import curl_httpclient


async def worker() -> Tuple[int, float]:
    # Fetch `url` in a tight loop for `duration` seconds
    # (both are module-level settings in the original project).
    count = 0
    start = now = time.time()
    session = curl_httpclient.CurlAsyncHTTPClient()
    while now - start < duration:
        resp = await session.fetch(url)
        resp.body  # touch the body so the response is fully consumed
        count += 1
        now = time.time()

    return count, now - start
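A possible driver for this benchmark worker, not part of the original example; it assumes `url` and `duration` are defined at module level and that Tornado 5+ is running on the asyncio event loop:

import asyncio

async def main(n_workers=4):
    # Run several workers concurrently and aggregate their counts.
    results = await asyncio.gather(*(worker() for _ in range(n_workers)))
    total = sum(count for count, _ in results)
    print('%d requests across %d workers' % (total, n_workers))

asyncio.run(main())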
Example #2
    def _get_client_adapter(self):
        """Return an asynchronous HTTP client adapter

        :rtype: :py:class:`tornado.httpclient.AsyncHTTPClient`

        """
        if self._use_curl:
            # pycurl-backed client; supports extras such as proxy options.
            return curl_httpclient.CurlAsyncHTTPClient(self._ioloop,
                                                       self._max_clients)
        # Default simple client; force_instance avoids Tornado's
        # shared singleton so this adapter gets its own client.
        return httpclient.AsyncHTTPClient(max_clients=self._max_clients,
                                          force_instance=True)
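For comparison, Tornado also lets you select the curl backend globally through the documented `AsyncHTTPClient.configure` hook instead of constructing `CurlAsyncHTTPClient` directly; a minimal sketch (the `max_clients` value is an arbitrary assumption):

from tornado.httpclient import AsyncHTTPClient

# Every AsyncHTTPClient() created after this uses the pycurl backend.
AsyncHTTPClient.configure('tornado.curl_httpclient.CurlAsyncHTTPClient',
                          max_clients=50)

client = AsyncHTTPClient()  # now a CurlAsyncHTTPClient under the hood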
Example #3
    @gen.coroutine
    def get(self):
        content = self.get_query_argument('chl', strip=False)

        url = 'https://chart.googleapis.com/chart?cht=qr&chs=%dx%d&chl=%s&chld=|0'\
              % (200, 200, escape.url_escape('ss://' + content, plus=False))

        request = HTTPRequest(url)

        if options.debug:
            logging.debug("qrcode url: " + url)
            # In debug mode, route through a local proxy; only the curl
            # backend honours the proxy_host/proxy_port request options.
            request.proxy_host = '127.0.0.1'
            request.proxy_port = 8123

            client = curl_httpclient.CurlAsyncHTTPClient()
        else:
            client = AsyncHTTPClient()

        response = yield client.fetch(request)

        self.write_png(response.body)
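A hedged sketch of how such a handler might be wired into an application; `QRCodeHandler` is a hypothetical name standing in for whatever RequestHandler subclass defines the get() above:

import tornado.ioloop
import tornado.web

# Hypothetical wiring for the handler shown above.
app = tornado.web.Application([(r'/qrcode', QRCodeHandler)])
app.listen(8888)
tornado.ioloop.IOLoop.current().start()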
Example #4
import os
from uuid import uuid4

from tornado import curl_httpclient, gen, httpclient


@gen.coroutine
def async_http_download(source,
                        destination,
                        request_timeout=10 * 60,
                        attempts=3):
    # Download into a temp file first so a failed or partial fetch
    # never leaves a truncated file at `destination`.
    tmp_name = '{}.{}'.format(destination, str(uuid4())[:8])

    try:
        # Binary mode: streaming_callback receives byte chunks.
        with open(tmp_name, 'wb') as iobuffer:

            @gen.coroutine
            def reset_iobuffer(*args):
                # Rewind and clear the temp file before a retry.
                iobuffer.seek(0)
                iobuffer.truncate()

            # fetch_request is a retrying helper defined elsewhere
            # in the same project.
            yield fetch_request(
                httpclient.HTTPRequest(
                    source,
                    # Write each chunk straight to the file.
                    streaming_callback=iobuffer.write,
                    # Timeout for the whole request, not just TCP connect.
                    request_timeout=request_timeout,
                ),
                client=curl_httpclient.CurlAsyncHTTPClient(),
                retry_callback=reset_iobuffer,
                attempts=attempts,
            )

    except BaseException:
        if os.path.exists(tmp_name):
            os.unlink(tmp_name)
        raise

    # If a concurrent download already produced `destination`,
    # keep it and discard our copy.
    if os.path.exists(destination):
        os.unlink(tmp_name)
    else:
        os.rename(tmp_name, destination)
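A minimal usage sketch; the URL and destination path are placeholders, and `IOLoop.run_sync` is just one way to drive the coroutine from blocking code:

from tornado.ioloop import IOLoop

# Drive the download coroutine to completion from synchronous code.
IOLoop.current().run_sync(
    lambda: async_http_download('https://example.com/file.tar.gz',
                                '/tmp/file.tar.gz'))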
Example #5
import asyncio
import time

from scrapy.selector import Selector
from tornado import curl_httpclient
from peewee import fn

from core import get_logger, get_database_manager
from models import Proxys
from settings import USE_PROXY, ERR_PREFIX

logger = get_logger()
objects = get_database_manager()
http_client = curl_httpclient.CurlAsyncHTTPClient()


async def async_get_html(url, delay_time=0, headers=None):
    # Optional politeness delay before issuing the request.
    await asyncio.sleep(delay_time)
    try:
        if USE_PROXY:
            # Pick one random healthy proxy (score > 0);
            # fn.Rand() maps to the database's RAND() function.
            proxy = await objects.execute(Proxys.select().where(
                Proxys.score > 0).order_by(fn.Rand()).limit(1))
            if proxy:
                response = await http_client.fetch(url,
                                                   headers=headers,
                                                   proxy_host=proxy[0].ip,
                                                   proxy_port=proxy[0].port)
            else:
                # No healthy proxy available; fall back to a direct fetch.
                response = await http_client.fetch(url, headers=headers)
        else:
            response = await http_client.fetch(url, headers=headers)