Example #1
async def fetch_one(instance: str) -> dict:
    timings = {}
    try:
        user_pool_limits = httpx.PoolLimits(soft_limit=10, hard_limit=300)
        network_type = get_network_type(instance)
        async with new_client(pool_limits=user_pool_limits, network_type=network_type) as session:
            # check index with a new connection each time
            print('🏠 ' + instance)
            await request_stat_with_exception(timings, 'index',
                                              session, instance,
                                              REQUEST_COUNT, 20, 40, None)
            # check wikipedia engine with a new connection each time
            print('🔎 ' + instance)
            await request_stat_with_exception(timings, 'search_wp',
                                              session, instance,
                                              REQUEST_COUNT, 30, 60, check_wikipedia_result,
                                              params={'q': '!wp time'})
            # check google engine with a new connection each time
            print('🔍 ' + instance)
            await request_stat_with_exception(timings, 'search_go',
                                              session, instance,
                                              2, 60, 80, check_google_result,
                                              params={'q': '!google time'})
    except RequestErrorException as ex:
        print('❌ {0}: {1}'.format(str(instance), str(ex)))
    except Exception as ex:
        print('❌❌ {0}: unexpected {1} {2}'.format(str(instance), type(ex), str(ex)))
        timings['error'] = exception_to_str(ex)
        traceback.print_exc(file=sys.stdout)
    else:
        print('🏁 {0}'.format(str(instance)))
    return timings
Example #2
async def simple_echo_client_fapi_concurrent_concurrent(ntimes):
    max_conns = 100
    limits = httpx.PoolLimits(hard_limit=max_conns)
    async with httpx.AsyncClient(timeout=600, pool_limits=limits) as client:

        futures = []
        for _ in range(max_conns):
            futures.append(
                client.post(
                    "http://localhost:9999/fapi/echo/simple", json=simple_string
                )
            )

        await asyncio.gather(*futures)
        print("warmed up")

        t1 = time.time()
        futures = []
        for _ in range(ntimes):
            futures.append(
                client.post(
                    "http://localhost:9999/fapi/echo/complex", json=complex_obj.dict()
                )
            )

        await asyncio.gather(*futures)

        return time.time() - t1
Example #3
    def __init__(self, host=None, port=None, sync=True, frontend_url=None):
        super().__init__(host, port)
        self.frontend_url = frontend_url
        if self.frontend_url:
            # frontend_url is provided, using frontend
            try:
                res = httpx.get(frontend_url)
                if res.status_code == 200:
                    # apply the pool limits when creating the client
                    pool_limits = httpx.PoolLimits(max_keepalive=1, max_connections=1)
                    self.cli = httpx.Client(pool_limits=pool_limits)
                    print(
                        "Attempt connecting to Cluster Serving frontend success"
                    )
                else:
                    raise ConnectionError()
            except Exception as e:
                print(
                    "Connection error, please check your HTTP server. Error msg is ",
                    e)
        else:
            self.output_queue = OutputQueue(host, port)

        # TODO: these params can be read from config in future
        self.input_threshold = 0.6
        self.interval_if_error = 1
Example #4
async def test_pool_timeout(server, backend):
    pool_limits = httpx.PoolLimits(hard_limit=1)
    timeout = httpx.Timeout(pool_timeout=1e-4)

    async with httpx.Client(pool_limits=pool_limits,
                            timeout=timeout) as client:
        async with client.stream("GET", server.url):
            with pytest.raises(httpx.PoolTimeout):
                await client.get("http://localhost:8000/")
Example #5
async def test_connection_closed_free_semaphore_on_acquire(server, restart, backend):
    """
    Verify that max_connections semaphore is released
    properly on a disconnected connection.
    """
    async with httpx.ConnectionPool(pool_limits=httpx.PoolLimits(hard_limit=1)) as http:
        response = await http.request("GET", "http://127.0.0.1:8000/")
        await response.read()

        # Close the connection so we're forced to recycle it
        await restart(server)

        response = await http.request("GET", "http://127.0.0.1:8000/")
        assert response.status_code == 200
Example #6
async def main():
    client = httpx.AsyncClient(timeout=None,
                               pool_limits=httpx.PoolLimits(soft_limit=10,
                                                            hard_limit=1000))

    orch_name = sys.argv[1]
    num_itr = int(sys.argv[2])
    result_file_name = RESULT_FILE_NAME.format(orch_name, num_itr)
    with open(result_file_name, 'w') as rf:
        rf.write("")
    async_jobs = []
    for _ in range(num_itr):
        async_jobs.append(async_request(client, orch_name, num_itr))

    await asyncio.gather(*async_jobs)  # async
Example #7
async def test_soft_limit(server, backend):
    """
    The soft_limit config should limit the maximum number of keep-alive connections.
    """
    pool_limits = httpx.PoolLimits(soft_limit=1)

    async with httpx.ConnectionPool(pool_limits=pool_limits, backend=backend) as http:
        response = await http.request("GET", "http://127.0.0.1:8000/")
        await response.read()
        assert len(http.active_connections) == 0
        assert len(http.keepalive_connections) == 1

        response = await http.request("GET", "http://localhost:8000/")
        await response.read()
        assert len(http.active_connections) == 0
        assert len(http.keepalive_connections) == 1
Example #8
async def test_connection_pool_closed_close_keepalive_and_free_semaphore(
        server):
    """
    Closing the connection pool should close remaining keepalive connections and
    release the max_connections semaphore.
    """
    http = ConnectionPool(pool_limits=httpx.PoolLimits(hard_limit=1))

    async with http:
        response = await http.request("GET", server.url)
        await response.aread()
        assert response.status_code == 200
        assert len(http.keepalive_connections) == 1

    assert len(http.keepalive_connections) == 0

    # Perform a second round of requests to make sure the max_connections semaphore
    # was released properly.
    async with http:
        response = await http.request("GET", server.url)
        await response.aread()
        assert response.status_code == 200
Example #9
def test_limits_eq():
    limits = httpx.PoolLimits(hard_limit=100)
    assert limits == httpx.PoolLimits(hard_limit=100)
Example #10
def test_limits_repr():
    limits = httpx.PoolLimits(hard_limit=100)
    assert repr(limits) == "PoolLimits(soft_limit=None, hard_limit=100)"
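The repr in Example #10 reflects an older httpx release where PoolLimits took soft_limit and hard_limit, while other examples on this page use the later max_keepalive and max_connections names; only one spelling is accepted by any given httpx version. A minimal sketch using the later names, assuming an httpx version that still ships PoolLimits:

import httpx

# Later parameter names, as used in Examples #15, #18 and #19; on earlier httpx
# releases the same two limits were spelled soft_limit= and hard_limit=
# (as in Examples #1, #10 and #11).
limits = httpx.PoolLimits(max_keepalive=10, max_connections=100)
client = httpx.Client(pool_limits=limits)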
Example #11
import logging
import asyncio
import os

import httpx

from ant_nest.exceptions import ExceptionFilter

# your ant's class modules or packages
ANT_PACKAGES = ["ants"]
ANT_ENV = os.getenv("ANT_ENV", "development")

# httpx config, see httpx.Client.__init__ for more detail
HTTPX_CONFIG = {
    "timeout": 5.0,
    "max_redirects": 20,
    "pool_limits": httpx.PoolLimits(soft_limit=10, hard_limit=100),
    "trust_env": True,
    "proxies": None,
    "auth": None,
    "headers": None,
    "cookies": None,
}

POOL_CONFIG = {
    "limit": 100,
}
REPORTER = {
    "slot": 60,
}

# ANT config
Example #12
    def __init__(
        self,
        host='localhost',
        port=8086,
        username='******',
        password='******',
        database=None,
        ssl=False,
        verify_ssl=False,
        timeout=None,
        retries=3,
        use_udp=False,
        udp_port=4444,
        proxies=None,
        pool_size=10,
        path='',
        cert=None,
        gzip=False,
        session=None,
        headers=None,
    ):
        """Construct a new InfluxDBClient object."""
        self.__host = host
        self.__port = int(port)
        self._username = username
        self._password = password
        self._database = database
        self._timeout = timeout
        self._retries = retries

        self._verify_ssl = verify_ssl

        self.__use_udp = use_udp
        self.__udp_port = int(udp_port)

        if use_udp:
            self.udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        if not path:
            self.__path = ''
        elif path[0] == '/':
            self.__path = path
        else:
            self.__path = '/' + path

        self._scheme = "http"

        if ssl is True:
            self._scheme = "https"

        if proxies is None:
            self._proxies = {}
        else:
            self._proxies = proxies

        if not session:
            pool_limit = httpx.PoolLimits(max_connections=int(pool_size))
            # session = requests.Session()
            session = httpx.AsyncClient(pool_limits=pool_limit,
                                        proxies=self._proxies,
                                        verify=self._verify_ssl,
                                        trust_env=False)

        self._session = session
        # adapter = requests.adapters.HTTPAdapter(
        #     pool_connections=int(pool_size),
        #     pool_maxsize=int(pool_size)
        # )

        # self._session.mount(self._scheme + '://', adapter)

        if cert:
            if not ssl:
                raise ValueError(
                    "Client certificate provided but ssl is disabled.")
            else:
                self._session.cert = cert

        self.__baseurl = "{0}://{1}:{2}{3}".format(self._scheme, self._host,
                                                   self._port, self._path)

        if headers is None:
            headers = {}
        headers.setdefault('Content-Type', 'application/json')
        headers.setdefault('Accept', 'application/x-msgpack')
        self._headers = headers

        self._gzip = gzip
Example #13
    'filename': "log/access.log",
    'mode': 'a'
}
LOGGING["loggers"]["sanic.root"]["handlers"].append("file")
LOGGING["loggers"]["sanic.error"]["handlers"].append("error_file")
LOGGING["loggers"]["sanic.access"]["handlers"].append("access_file")
LOGGING["formatters"]["generic"]["datefmt"] = "%Y-%m-%dT%H:%M:%S%z"
LOGGING["formatters"]["access"]["datefmt"] = "%Y-%m-%dT%H:%M:%S%z"

# Initialize Sanic
app = Sanic("MDClient rewritten in Python!", log_config=LOGGING)
app.config.KEEP_ALIVE_TIMEOUT = 60
app.tls_created_at = None

# Initialise httpx
limits = httpx.PoolLimits(max_keepalive=100, max_connections=1000)
timeout = httpx.Timeout(300)
client = httpx.AsyncClient(verify=False, pool_limits=limits, timeout=timeout)


##
# Cache Async Libraries
##
async def set_async(key, val):
    loop = asyncio.get_running_loop()
    future = loop.run_in_executor(None, cache.set, key, val)
    result = await future
    return result


async def get_async(key):
Example #14
def test_limits_repr():
    limits = httpx.PoolLimits(hard_limit=100)
    assert (repr(limits) ==
            "PoolLimits(soft_limit=None, hard_limit=100, pool_timeout=None)")
Example #15
async def get_stock_prices(secs):
    pool_limits = httpx.PoolLimits(max_keepalive=50, max_connections=HTTP_CONCURRENCY)
    async with httpx.AsyncClient(http2=True, pool_limits=pool_limits) as http:
        prices = await asyncio.gather(*[get_stock_price(http, sec) for sec in secs])
        return [price for price in prices if price]
Example #16
def test_pool_limits_deprecated():
    with pytest.warns(DeprecationWarning):
        httpx.PoolLimits()
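Example #16 only asserts that constructing PoolLimits raises a DeprecationWarning. In the httpx releases that deprecate it, the same settings are configured through httpx.Limits and the client's limits argument instead; a minimal sketch of the replacement, assuming a recent httpx version (names below come from the newer API, not from the examples above):

import httpx

# httpx.Limits(max_keepalive_connections=..., max_connections=...) replaces
# PoolLimits(soft_limit=..., hard_limit=...) / (max_keepalive=..., max_connections=...).
limits = httpx.Limits(max_keepalive_connections=10, max_connections=100)
client = httpx.AsyncClient(limits=limits)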
Example #17
import aiohttp
import httpx
from fastapi import Depends, FastAPI

from app.dependencies import AiohttpDependency, HTTPXDependency

app = FastAPI(docs_url=None, redoc_url=None, openapi_url=None)

client = httpx.AsyncClient(
    pool_limits=httpx.PoolLimits(soft_limit=10, hard_limit=50000))
session = aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(5.0))
dep_aiohttp = AiohttpDependency(client=session)
dep_httpx = HTTPXDependency(client=client)


@app.get("/aiohttp", dependencies=[Depends(dep_aiohttp)])
async def test_aiohttp():
    return True


@app.get("/httpx", dependencies=[Depends(dep_httpx)])
async def test_httpx():
    return True
Example #18
def test_limits_eq():
    limits = httpx.PoolLimits(max_connections=100)
    assert limits == httpx.PoolLimits(max_connections=100)
Example #19
def test_limits_repr():
    limits = httpx.PoolLimits(max_connections=100)
    assert repr(
        limits) == "PoolLimits(max_keepalive=None, max_connections=100)"