Пример #1
0
async def fetch_all(output, modules):
    """Download every module's PDB (and code file, when present) concurrently.

    Returns the subset of ``modules`` whose PDB fetch succeeded, with the
    ``has_code`` slot replaced by the result of the code-file fetch.
    """
    event_loop = asyncio.get_event_loop()

    # In case of errors (Too many open files), just change limit_per_host
    connector = TCPConnector(limit=100, limit_per_host=0)

    session = ClientSession(loop=event_loop,
                            timeout=ClientTimeout(total=TIMEOUT),
                            connector=connector)
    async with session as client:
        pending = []
        for filename, debug_id, code_file, code_id, has_code in modules:
            pending.append(fetch_and_write(output, client, filename, debug_id))
            if has_code:
                pending.append(
                    fetch_and_write(output, client, code_file, code_id))

        # Results come back in submission order, so walk the modules again
        # and consume one (or two) results per module.
        results = iter(await asyncio.gather(*pending))

        fetched = []
        for filename, debug_id, code_file, code_id, has_code in modules:
            got_pdb = next(results)
            if has_code:
                has_code = next(results)
            if got_pdb:
                fetched.append(
                    (filename, debug_id, code_file, code_id, has_code))

    return fetched
Пример #2
0
    async def passthrough(self, request):
        """Make non-mocked network request"""
        # Fresh connector; host resolution is delegated to the resolver
        # callable stashed in self._old_resolver_mock (presumably the real,
        # pre-mock resolver -- confirm against the mock installation code).
        connector = TCPConnector()
        connector._resolve_host = partial(self._old_resolver_mock, connector)

        # Temporarily restore the original is_ssl so the real request uses
        # real SSL detection; the mock is reinstated in ``finally``.
        new_is_ssl = ClientRequest.is_ssl
        ClientRequest.is_ssl = self._old_is_ssl
        try:
            # The original scheme was recorded in the AResponsesIsSSL header;
            # rebuild the request with that scheme.
            original_request = request.clone(
                scheme='https' if request.
                headers['AResponsesIsSSL'] else 'http')

            # Forward every header except the internal marker.
            headers = {
                k: v
                for k, v in request.headers.items() if k != 'AResponsesIsSSL'
            }

            async with ClientSession(connector=connector) as session:
                # Dispatch on the HTTP verb (session.get / session.post / ...).
                async with getattr(session, request.method.lower())(
                        original_request.url,
                        headers=headers,
                        data=(await request.read())) as r:
                    # Only the content type is propagated back to the caller.
                    headers = {
                        k: v
                        for k, v in r.headers.items()
                        if k.lower() == 'content-type'
                    }
                    text = await r.text()
                    response = self.Response(text=text,
                                             status=r.status,
                                             headers=headers)
                    return response
        finally:
            # Always restore the mocked is_ssl, even if the request failed.
            ClientRequest.is_ssl = new_is_ssl
Пример #3
0
    async def _resolve(self, endpoint: str, host, port, family):
        """Resolve *host* through a DNS-over-HTTPS JSON *endpoint*.

        Returns aiohttp-resolver-style entry dicts for every matching answer;
        raises OSError when the server reports a non-zero ``Status``.
        """
        # AAAA records for IPv6 lookups, A records otherwise.
        if family == socket.AF_INET6:
            record_type = RecordType.AAAA
        else:
            record_type = RecordType.A

        params = {
            'ct': 'application/dns-json',
            'name': host,
            'type': record_type.name,
        }

        # NOTE(review): ``resolveer_class`` looks like a typo for
        # ``resolver_class`` -- confirm against the attribute assigned in the
        # class constructor before changing it.
        resolver = self.resolveer_class()
        connector = TCPConnector(resolver=resolver)

        async with CS(connector=connector) as session:
            async with session.get(endpoint, params=params) as resp:
                data = self.json_loads(await resp.text())

        # NOTE(review): the session's __aexit__ already closes its connector;
        # this extra close is presumably defensive -- verify it is needed.
        await connector.close()

        if data['Status'] != 0:
            raise OSError("DNS lookup failed")

        # Keep answers whose type matches the request (by mnemonic name or
        # numeric value) and that carry a non-empty payload.
        return [{
            'hostname': host,
            'host': r['data'],
            'port': port,
            'family': family,
            'proto': 0,
            'flags': socket.AI_NUMERICHOST
        } for r in data['Answer'] if r['type'] in (
            record_type.name,
            record_type.value,
        ) and r['data']]
Пример #4
0
 async def fetch(self, url: str, args: CmdArgs) -> bytes:
     """Download *url* and return the raw response body.

     Certificate verification is disabled; the proxy and request headers
     are taken from *args*.
     """
     connector = TCPConnector(ssl=False)
     async with ClientSession(connector=connector) as client:
         request = client.get(url, proxy=args.proxy, headers=args.headers)
         async with request as resp:
             return await resp.content.read()
Пример #5
0
def function141(function1293, arg1054, arg38):
    """Closing the session must also trigger the connector's close()."""
    connector = TCPConnector(loop=arg1054)
    arg38.spy(connector, 'close')
    session = function1293(connector=connector)
    # The session exposes the connector under the (obfuscated) attribute name.
    assert session.function647 is connector
    session.close()
    assert connector.close.called
    connector.close()
 def test_connector_loop(self):
     """A session must reject a connector bound to a different event loop."""
     other_loop = asyncio.new_event_loop()
     connector = TCPConnector(loop=other_loop)
     expected = "loop argument must agree with connector"
     with self.assertRaisesRegex(ValueError, expected):
         ClientSession(connector=connector, loop=self.loop)
     connector.close()
     other_loop.close()
Пример #7
0
async def test_connector(create_session, loop, mocker) -> None:
    """The session keeps the supplied connector and closes it on close()."""
    conn = TCPConnector(loop=loop)
    mocker.spy(conn, 'close')
    session = await create_session(connector=conn)
    assert session.connector is conn

    await session.close()
    assert conn.close.called
    conn.close()
Пример #8
0
def test_connector(create_session, loop, mocker):
    """The session keeps the supplied connector and closes it on close()."""
    conn = TCPConnector(loop=loop)
    mocker.spy(conn, 'close')
    session = create_session(connector=conn)
    assert session.connector is conn

    session.close()
    assert conn.close.called
    conn.close()
async def test_connector(create_session: Any, loop: Any, mocker: Any) -> None:
    """Closing the session must also close the attached connector."""
    conn = TCPConnector()
    mocker.spy(conn, "close")
    session = await create_session(connector=conn)
    assert session.connector is conn

    await session.close()
    assert conn.close.called
    await conn.close()
Пример #10
0
def test_connector_loop(loop):
    """ClientSession must refuse a connector created on a different loop.

    Both the temporary loop and the connector are registered with an
    ExitStack so they are closed even when the assertion fails.
    """
    with contextlib.ExitStack() as stack:
        another_loop = asyncio.new_event_loop()
        stack.enter_context(contextlib.closing(another_loop))
        connector = TCPConnector(loop=another_loop)
        stack.enter_context(contextlib.closing(connector))
        # pytest has no ``raises_regexp`` helper; the supported spelling is
        # ``pytest.raises(..., match=...)``.
        with pytest.raises(ValueError,
                           match="loop argument must agree with connector"):
            ClientSession(connector=connector, loop=loop)
Пример #11
0
def test_connector_loop(loop):
    """A connector from another loop must be rejected with a clear error."""
    with contextlib.ExitStack() as cleanup:
        foreign_loop = asyncio.new_event_loop()
        cleanup.enter_context(contextlib.closing(foreign_loop))
        conn = TCPConnector(loop=foreign_loop)
        cleanup.enter_context(contextlib.closing(conn))
        with pytest.raises(ValueError) as excinfo:
            ClientSession(connector=conn, loop=loop)
        assert re.match("loop argument must agree with connector",
                        str(excinfo.value))
Пример #12
0
def test_connector_loop(loop):
    """Mixing a session and connector from different loops raises RuntimeError."""
    with contextlib.ExitStack() as cleanup:
        foreign_loop = asyncio.new_event_loop()
        cleanup.enter_context(contextlib.closing(foreign_loop))
        conn = TCPConnector(loop=foreign_loop)
        cleanup.enter_context(contextlib.closing(conn))
        with pytest.raises(RuntimeError) as excinfo:
            ClientSession(connector=conn, loop=loop)
        assert re.match("Session and connector has to use same event loop",
                        str(excinfo.value))
Пример #13
0
def function522(arg1948):
    """Mixing a connector from one loop with a session on another must fail."""
    with contextlib.ExitStack() as stack:
        extra_loop = asyncio.new_event_loop()
        stack.enter_context(contextlib.closing(extra_loop))
        connector = TCPConnector(loop=extra_loop)
        stack.enter_context(contextlib.closing(connector))
        with pytest.raises(RuntimeError) as excinfo:
            ClientSession(connector=connector, loop=arg1948)
        assert re.match('Session and connector has to use same event loop',
                        str(excinfo.value))
Пример #14
0
 async def fetch(self, url: str, args: CmdArgs) -> bytes:
     """Fetch *url* and return the raw body, honouring ``args.proxy``.

     Certificate verification is disabled in both the direct and the
     proxied path.
     """
     if args.proxy == '':
         connector = TCPConnector(ssl=False)
     else:
         connector = ProxyConnector.from_url(args.proxy, ssl=False)
     async with ClientSession(connector=connector) as client:
         async with client.get(url, headers=args.headers) as resp:
             return await resp.content.read()
Пример #15
0
async def get(url, proxy=PROXY):
    """GET *url* and return the response text.

    When *proxy* is truthy the request is routed through the given SOCKS
    proxy; otherwise a direct connection is used. TLS certificate
    verification is disabled in both paths.
    """
    if proxy:
        # Build the connector from the *proxy* argument (the old code always
        # used the module-level PROXY, silently ignoring the parameter) and
        # only when it is actually needed (previously it was created
        # unconditionally and leaked in the non-proxy branch).
        connector = SocksConnector.from_url(proxy, verify_ssl=False)
        async with aiohttp.ClientSession(connector=connector) as session:
            async with session.get(url, headers={'User-agent': HEADER}) as response:
                return await response.text()
    else:
        async with aiohttp.ClientSession(connector=TCPConnector(verify_ssl=False)) as session:
            async with session.get(url, headers={'User-agent': HEADER}) as response:
                return await response.text()
Пример #16
0
    def __init__(self, config):
        """Build the session wrapper from a configuration mapping.

        Raises AioSessionException when *config* is not a dict.
        """
        if not isinstance(config, dict):
            raise AioSessionException(
                'Client(config) error\nconfig must a dict')

        self._closed = False
        # Fall back to the running loop when the config does not supply one.
        configured_loop = config.get('loop')
        self._loop = asyncio.get_event_loop() if configured_loop is None \
            else configured_loop
        self._connector = TCPConnector(loop=self._loop)
        self._context = AioSessionContext(config)
        self._context.update({'connector': self._connector})
Пример #17
0
 async def fetch(self, url: str) -> "tuple[str, str]":
     """Fetch *url* (optionally via proxy) and return ``(final_url, body)``.

     NOTE(review): the original annotation said ``-> str`` but the method
     returns a 2-tuple; the annotation is corrected here.  The second
     element's exact type depends on ``load_raw2text`` -- presumed to be
     decoded text; confirm against its definition.
     """
     # Route through a proxy only when one was configured.
     if self.args.proxy != '':
         connector = ProxyConnector.from_url(self.args.proxy, ssl=False)
     else:
         connector = TCPConnector(ssl=False)
     async with ClientSession(
             connector=connector) as client:  # type: ClientSession
         async with client.get(
                 url,
                 headers=self.args.headers) as resp:  # type: ClientResponse
             return str(resp.url), self.load_raw2text(await resp.read())
Пример #18
0
async def get_hourly_readings_in_time_interval(
        station_codes: List[int],
        start_time_stamp: datetime,
        end_time_stamp: datetime,
        use_cache: bool = True) -> List[WeatherStationHourlyReadings]:
    """ Fetch the hourly observations from WFWX API for the list of station codes provided,
    between the start_time_stamp and end_time_stamp specified.
    """
    # Cap concurrent connections to the upstream API at 10.
    connector = TCPConnector(limit=10)
    async with ClientSession(connector=connector) as session:
        auth_header = await wfwx_api.get_auth_header(session)
        readings = await wfwx_api.get_hourly_readings(
            session, auth_header, station_codes, start_time_stamp,
            end_time_stamp, use_cache)
        return readings
Пример #19
0
    def __init__(self, rest_service_name='GenericService', spec=None, plugins=None, config=None,
                 parser=None, serializer=None, base_path='', loop=None, logger=None):
        """Wire up logging, plugins, (de)serialisation hooks and the aiohttp session."""
        self._plugins = []

        self.rest_service_name = rest_service_name
        self.logger = logger or logging.getLogger(f'serviceClient.{rest_service_name}')
        self.spec = spec or {}
        self.add_plugins(plugins or [])
        self.config = config or {}
        # Default parser/serializer are pass-through no-ops.
        self.parser = parser or (lambda x, *args, **kwargs: x)
        self.serializer = serializer or (lambda x, *args, **kwargs: x)
        self.base_path = base_path
        self.loop = loop or get_event_loop()

        connector_options = self.config.get('connector', {})
        self.connector = TCPConnector(loop=self.loop, **connector_options)
        self.session = ClientSession(connector=self.connector, loop=self.loop)
Пример #20
0
 def __init__(self,
              *,
              loop,
              verify_ssl=True,
              ca_certificate_file=None,
              proxy=None):
     """Create an HTTP client session, optionally trusting a custom CA bundle."""
     self.loop = loop
     self.proxy = proxy
     if ca_certificate_file is None:
         ssl_context = None
     else:
         # Verify server certificates against the supplied CA bundle.
         ssl_context = ssl.create_default_context()
         ssl_context.load_verify_locations(cafile=ca_certificate_file)
     connector = TCPConnector(loop=loop,
                              verify_ssl=verify_ssl,
                              ssl_context=ssl_context)
     self.session = ClientSession(loop=loop, connector=connector)
Пример #21
0
def ClientSession(*args, **kwargs) -> CS:  # noqa
    """Shortcut of aiohttp.ClientSession and DNSOverHTTPSResolver"""

    # Public DoH endpoints used when the caller does not supply any.
    default_endpoints = [
        'https://dns.google.com/resolve',
        'https://cloudflare-dns.com/dns-query',
    ]
    resolver = DNSOverHTTPSResolver(
        endpoints=kwargs.pop('endpoints', default_endpoints),
        json_loads=kwargs.pop('json_loads', json.loads),
        resolver_class=kwargs.pop('resolver_class', None),
    )
    return CS(*args, **kwargs, connector=TCPConnector(resolver=resolver))
Пример #22
0
async def get_hourly_readings(
        station_codes: List[int],
        time_of_interest: datetime) -> List[WeatherStationHourlyReadings]:
    """ Get the hourly readings for the list of station codes provided.
    Reading 5 days before, and 10 days after the time of interest are returned.
    Depending on configuration, will read from WF1 or from local database.
    """
    start_time_stamp, end_time_stamp = _get_time_interval(time_of_interest)

    if not wfwx_api.use_wfwx():
        return await fetch_hourly_readings_from_db(station_codes,
                                                   start_time_stamp,
                                                   end_time_stamp)

    # Limit the number of concurrent connections.
    async with ClientSession(connector=TCPConnector(limit=10)) as session:
        header = await wfwx_api.get_auth_header(session)
        return await wfwx_api.get_hourly_readings(session, header,
                                                  station_codes,
                                                  start_time_stamp,
                                                  end_time_stamp)
Пример #23
0
    def __init__(
        self,
        host,
        username,
        password,
        authentication_method,
        session: ClientSession = None,
        ssl=False,
        verify_ssl=True,
    ):
        """
        Create a SagemCom client.

        :param host: the host of your Sagemcom router
        :param username: the username for your Sagemcom router
        :param password: the password for your Sagemcom router
        :param authentication_method: the auth method of your Sagemcom router
        :param session: use a custom session, for example to configure the timeout
        :param ssl: when True the client talks to the router over https,
            otherwise http
        :param verify_ssl: forwarded as the connector's ``ssl`` argument when
            no custom session is given.
            NOTE(review): ``TCPConnector(ssl=verify_ssl)`` means True uses
            default certificate checking and False disables TLS checks --
            confirm this matches the intended "verify" semantics.
        """
        self.host = host
        self.username = username
        self.authentication_method = authentication_method
        # Only a hash of the password is kept; the clear text is not stored.
        self._password_hash = self.__generate_hash(password)

        self.protocol = "https" if ssl else "http"

        # Per-session state for the nonce-based request handshake.
        self._current_nonce = None
        self._server_nonce = ""
        self._session_id = 0
        self._request_id = -1

        # Use the caller's session when given; otherwise build one with a
        # default User-Agent, timeout and TLS policy.
        self.session = (session if session else ClientSession(
            headers={"User-Agent": f"{DEFAULT_USER_AGENT}/{__version__}"},
            timeout=ClientTimeout(DEFAULT_TIMEOUT),
            connector=TCPConnector(ssl=verify_ssl),
        ))
Пример #24
0
async def collect(modules):
    """Concurrently collect symbol info for *modules*; return what to dump.

    Returns ``(to_dump, stats)`` where ``stats`` counts modules without a
    PDB, split by whether the file was already present.
    """
    loop = asyncio.get_event_loop()

    # In case of errors (Too many open files), just change limit_per_host
    connector = TCPConnector(limit=100, limit_per_host=4)

    async with ClientSession(loop=loop,
                             timeout=ClientTimeout(total=TIMEOUT),
                             connector=connector) as client:
        tasks = [
            collect_info(client, filename, debug_id, code_file, code_id)
            for filename, ids in modules.items()
            for debug_id, code_file, code_id in ids
        ]
        res = await asyncio.gather(*tasks)

    to_dump = []
    stats = {"no_pdb": 0, "is_there": 0}
    for filename, debug_id, code_file, code_id, has_pdb, has_code, is_there in res:
        if not has_pdb:
            if is_there:
                stats["is_there"] += 1
            else:
                stats["no_pdb"] += 1
                log.info(f"No pdb for (unknown)/{debug_id}")
            continue

        log.info(
            f"To dump: (unknown)/{debug_id}, {code_file}/{code_id} and has_code = {has_code}"
        )
        to_dump.append((filename, debug_id, code_file, code_id, has_code))

    log.info(f"Collected {len(to_dump)} files to dump")

    return to_dump, stats
Пример #25
0
def test_connector(create_session, loop):
    """The session must expose exactly the connector it was given."""
    conn = TCPConnector(loop=loop)
    assert create_session(connector=conn).connector is conn
Пример #26
0
 async def fetch(self, url: str) -> str:
     """GET *url* (optionally via proxy) and return the UTF-8 decoded body."""
     connector = TCPConnector(ssl=False)
     async with ClientSession(connector=connector) as client:  # type: ClientSession
         async with client.get(url, proxy=self.args.proxy,
                               headers=self.args.headers) as resp:
             return await resp.text(encoding='utf-8')
Пример #27
0
def _populate(database_url, legacy_database_url, dpayd_http_url, start_block, end_block,accounts_file):
    """Populate the database with blocks and operations from a dPay endpoint.

    Runs a numbered sequence of CLI tasks: connectivity check, db init,
    block-range discovery, two load sweeps, then a final streaming step.
    NOTE(review): depends on a module-level ``loop`` and many helpers defined
    elsewhere in the file; the aiohttp session/connector created here are
    never closed in this function.
    """
    # Pooled HTTP session shared by every task in this run.
    CONNECTOR = TCPConnector(loop=loop, limit=100)
    AIOHTTP_SESSION = aiohttp.ClientSession(loop=loop,
                                            connector=CONNECTOR,
                                            json_serialize=json.dumps,
                                            headers={'Content-Type': 'application/json'})
    DB_META = task_load_db_meta(legacy_database_url)

    try:

        pool = create_asyncpg_pool(database_url)
        task_num = 0
        # [1/7] confirm db connectivity
        task_num += 1
        task_message = fmt_task_message(
            'Confirm database connectivity',
            emoji_code_point=u'\U0001F4DE',
            task_num=task_num)
        click.echo(task_message)
        # NOTE(review): the actual connectivity check is commented out; only
        # the message is printed.
        #task_confirm_db_connectivity(database_url)

        # [2/7] init db if required
        task_num += 1
        task_message = fmt_task_message(
            'Initialising db if required',
            emoji_code_point=u'\U0001F50C',
            task_num=task_num)
        click.echo(task_message)
        task_init_db_if_required(database_url=database_url)

        # [3/7] find last irreversible block
        # When end_block is -1, ask the chain for its highest irreversible
        # block; otherwise honour the explicit --end_block value.
        task_num += 1
        if end_block == -1:
            task_message = fmt_task_message(
            'Finding highest blockchain block',
            emoji_code_point='\U0001F50E',
            task_num=task_num)
            click.echo(task_message)
            last_chain_block_num = loop.run_until_complete(
                get_last_irreversible_block_num(dpayd_http_url, AIOHTTP_SESSION))
            end_block = last_chain_block_num
            success_msg = fmt_success_message(
                'last irreversible block number is %s',last_chain_block_num )
            click.echo(success_msg)
        else:
            task_message = fmt_task_message(
                f'Using --end_block {end_block} as highest blockchain block to load',
                emoji_code_point='\U0001F50E',
                task_num=task_num)
            click.echo(task_message)

        # [4/7] build list of blocks missing from db
        existing_count, missing_count, range_count = loop.run_until_complete(get_existing_and_missing_count(pool, start_block, end_block))
        task_message = fmt_task_message(
            f'Building list of {missing_count} blocks missing from db between {start_block}<<-->>{end_block}' ,
            emoji_code_point=u'\U0001F52D',
            task_num=4)
        click.echo(task_message)


        with click.progressbar(length=end_block, **pbar_kwargs) as pbar:
            pbar.update(existing_count)
            missing_block_nums = loop.run_until_complete(
                collect_missing_block_nums(
                    pool,
                    end_block,
                    missing_count,
                    start_block=start_block,
                    pbar=pbar))

        # [5.1/7] preload accounts file
        if accounts_file:
            task_message = fmt_task_message(
                'Preloading account names',
                emoji_code_point=u'\U0001F52D',
                task_num=5)
            click.echo(task_message)
            with open(accounts_file) as f:
                account_names = json.load(f)
            loop.run_until_complete(
                preload_account_names(pool, account_names))
            # Free the (potentially large) name list before the heavy load.
            del account_names


        # [5/7] add missing blocks and operations
        task_message = fmt_task_message(
            'Adding missing blocks and operations to db',
            emoji_code_point=u'\U0001F52D',
            task_num=5)
        click.echo(task_message)



        # Operation progress scales block counts by 50 (ops-per-block
        # estimate used for both the initial and total values).
        blocks_progress_bar = tqdm(initial=existing_count,
                                   total=range_count,
                                   bar_format='{bar}| [{rate_fmt}{postfix}]',
                                   ncols=48,
                                   dynamic_ncols=False,
                                   unit=' blocks',
                                   )
        ops_progress_bar = tqdm(initial=existing_count * 50,
                                total=range_count * 50,
                                bar_format='{bar}| [{rate_fmt}{postfix}]',
                                ncols=48,
                                dynamic_ncols=False,
                                unit='    ops')

        loop.run_until_complete(process_blocks(missing_block_nums,
                                             dpayd_http_url,
                                             AIOHTTP_SESSION,
                                             pool,
                                             DB_META,
                                             blocks_pbar=blocks_progress_bar,
                                             ops_pbar=ops_progress_bar))

        # [6/7] Make second sweep for missing blocks
        task_message = fmt_task_message(
            'Adding missing blocks and operations to db (second sweep)',
            emoji_code_point=u'\U0001F52D',
            task_num=6)
        click.echo(task_message)

        existing_count, missing_count, range_count = loop.run_until_complete(
            get_existing_and_missing_count(pool, start_block, end_block))
        task_message = fmt_task_message(
            f'Building list of {missing_count} blocks missing from db between {start_block}<<-->>{end_block}',
            emoji_code_point=u'\U0001F52D',
            task_num=4)
        click.echo(task_message)

        with click.progressbar(length=end_block, **pbar_kwargs) as pbar:
            pbar.update(existing_count)
            missing_block_nums = loop.run_until_complete(
                collect_missing_block_nums(
                    pool,
                    end_block,
                    missing_count,
                    start_block=start_block,
                    pbar=pbar))


        blocks_progress_bar = tqdm(initial=existing_count,
                                   total=range_count,
                                   dynamic_ncols=False,
                                   unit=' blocks',
                                   )
        ops_progress_bar = tqdm(initial=existing_count * 50,
                                total=range_count * 50,
                                dynamic_ncols=False,
                                unit='    ops')
        loop.run_until_complete(process_blocks(missing_block_nums,
                                               dpayd_http_url,
                                               AIOHTTP_SESSION,
                                               pool,
                                               DB_META,
                                               blocks_pbar=blocks_progress_bar,
                                               ops_pbar=ops_progress_bar))



        # [7/7] stream new blocks
        # NOTE(review): only the message is emitted here; no streaming call is
        # visible in this chunk.
        task_message = fmt_task_message(
            'Streaming blocks', emoji_code_point=u'\U0001F4DD',
            task_num=7)
        click.echo(task_message)

    except KeyboardInterrupt:
        # Ctrl-C ends the run quietly.
        pass
    except Exception as e:
        logger.exception('ERROR')
        raise e
Пример #28
0
 def __init__(self, connection_limit=10):
     """Create a pooled session capped at *connection_limit* connections."""
     pool = TCPConnector(limit=connection_limit)
     self.con = ClientSession(connector=pool, headers=self.headers)
Пример #29
0
 async def make_connector():
     """Return a fresh default TCPConnector."""
     connector = TCPConnector()
     return connector
 def test_connector(self):
     """The session should report the connector passed at construction."""
     conn = TCPConnector(loop=self.loop)
     session = ClientSession(connector=conn, loop=self.loop)
     self.assertIs(session.connector, conn)
     session.close()