async def test_host_without_auth_in_header():
    """The Host header keeps the explicit :80 while auth moves to Authorization."""
    client = Client(dispatch=MockDispatch())
    target = "http://*****:*****@example.org:80/echo_headers"

    response = await client.get(target)

    assert response.status_code == 200
    expected = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "host": "example.org:80",
        "user-agent": f"python-httpx/{__version__}",
        "authorization": "Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
    }
    assert response.json() == {"headers": expected}
def __init__(cls, *args, **kwargs):
    """Collect endpoint members and attach a shared HTTP client to the class.

    Endpoints are detected either as `Endpoint` instances or as callables
    flagged with `__is_endpoint__` (covers staticmethod/classmethod wrappers).
    """
    # get all endpoints including wrapped by staticmethod or classmethod
    cls._end_points = inspect.getmembers(
        cls,
        lambda val: (isinstance(val, Endpoint)
                     or getattr(val, '__is_endpoint__', False)))
    # Only create a client when the class does not already provide one.
    # FIX: the original `if cls.is_async and not client: AsyncClient()
    # else: Client()` silently replaced a pre-set client on an async class
    # with a *sync* Client.
    if not getattr(cls, 'client', None):
        cls.client = AsyncClient() if cls.is_async else Client()
    for name, endpoint in cls._end_points:
        raw = cls.__dict__[name]
        is_staticmethod = isinstance(raw, staticmethod)
        is_classmethod = isinstance(raw, classmethod)
        is_method = not (is_staticmethod or is_classmethod)
        if not is_staticmethod:
            # Plain methods and classmethods receive an implicit first
            # argument, so the dependant signature must be rebuilt.
            endpoint.rebuild_dependant(is_method, is_classmethod)
        endpoint.client = cls.client
    super().__init__(*args, **kwargs)
def test_http2_reconnect():
    """
    When the server drops the connection between two requests, the client
    should reconnect transparently and the second request should succeed.
    """
    backend = MockHTTP2Backend(app=app)

    with Client(backend=backend) as client:
        first = client.get("http://example.org/1")
        backend.server.raise_disconnect = True
        second = client.get("http://example.org/2")

    assert first.status_code == 200
    assert json.loads(first.content) == {"method": "GET", "path": "/1", "body": ""}
    assert second.status_code == 200
    assert json.loads(second.content) == {"method": "GET", "path": "/2", "body": ""}
def test_cookie_persistence():
    """
    Cookies set by one response must be stored on the Client and sent
    with subsequent requests.
    """
    with Client(dispatch=MockDispatch()) as client:
        # No cookies on the first request.
        first = client.get("http://example.org/echo_cookies")
        assert first.status_code == 200
        assert first.json() == {"cookies": None}

        # The server sets a cookie; it must land in both jars.
        second = client.get("http://example.org/set_cookie")
        assert second.status_code == 200
        assert second.cookies["example-name"] == "example-value"
        assert client.cookies["example-name"] == "example-value"

        # The stored cookie is echoed back on the next request.
        third = client.get("http://example.org/echo_cookies")
        assert third.status_code == 200
        assert third.json() == {"cookies": "example-name=example-value"}
def viewerIsOrgMember(client: httpx.Client, org: str) -> Optional[bool]:
    """Return whether the authenticated viewer is a member of *org*.

    Returns True/False on a successful GraphQL response (False also when
    the API reports errors), or None when the HTTP call itself fails.
    """
    ql = """query($org: String!){
        organization(login:$org){
            viewerIsAMember
        }
    }"""
    result = client.post(QL_ENDPOINT, json={"query": ql, "variables": {"org": org}})
    if result.status_code == 200:
        body: Dict = result.json()
        if body.get("errors", None) is not None:
            print(body["errors"])
            return False
        else:
            # FIX: use a distinct local name — the original rebound the
            # `org` str parameter to this dict.
            organization: Dict[str, Any] = body["data"]["organization"]
            return organization["viewerIsAMember"]
    return None
async def test_tutor_signup_with_email(client: httpx.Client):
    """Signing up with an email creates the user; a duplicate signup fails."""
    payload = {
        "full_name": "Danny::Novak",
        "email": "*****@*****.**",
        "gender": "M",
        "dob": "1991-12-16",  # year-month-day
        "password": "******",
        # sent in addition to signup info to skip email verification.
        "signup_info": {"verification": None},
    }

    def do_signup():
        return client.post("/signup", json=payload)

    async with models.User.database:
        await clean_db()

        response = await do_signup()
        assert response.status_code == 200
        body: dict = response.json()
        token = body["data"]["access_token"]

        # decode jwt token and check the embedded profile fields
        claims = jwt.decode(token, verify=False)
        expected = {
            "email": "*****@*****.**",
            "gender": "M",
            "full_name": "Danny::Novak",
            "first_name": "Danny",
            "birthday": "1991-12-16",
        }
        for field, value in expected.items():
            assert claims[field] == value

        record = await models.User.objects.get(email=claims["email"])
        assert record.email == "*****@*****.**"
        assert record.gender.value == "M"
        assert not record.email_verified

        # second attempt to signup should result in an error.
        response = await do_signup()
        assert response.status_code == 400
        assert response.json() == {
            "status": False,
            "errors": {"email": ["value_error.duplicate"]},
        }
def get_profile(request: ScraperRequest, client: httpx.Client = DEFAULT_CLIENT):
    """Fetch the Ordoclic profile matching the request URL.

    Returns the decoded JSON profile, None on an HTTP error status, or
    False on a timeout (callers can distinguish the two failure modes).
    """
    slug = request.get_url().rsplit("/", 1)[-1]
    prof = request.get_url().rsplit("/", 2)[-2]
    # Professionals and public entities live on different API endpoints.
    if prof in ["pharmacien", "medecin"]:  # pragma: no cover
        base_url = ORDOCLIC_API.get("profile_professionals").format(slug=slug)
    else:
        base_url = ORDOCLIC_API.get("profile_public_entities").format(slug=slug)
    request.increase_request_count("booking")
    try:
        r = client.get(base_url)
        r.raise_for_status()
    except httpx.TimeoutException:
        logger.warning(f"request timed out for center: {base_url}")
        return False
    except httpx.HTTPStatusError as err:  # was `hex`, shadowing the builtin
        logger.warning(f"{base_url} returned error {err.response.status_code}")
        return None
    return r.json()
async def test_header_merge():
    """Client-level and request-level headers are merged into one set."""
    url = "http://example.org/echo_headers"
    client = Client(
        dispatch=MockDispatch(),
        headers={"User-Agent": "python-myclient/0.2.1"},
    )

    response = await client.get(url, headers={"X-Auth-Token": "FooBarBazToken"})

    assert response.status_code == 200
    merged = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "host": "example.org",
        "user-agent": "python-myclient/0.2.1",
        "x-auth-token": "FooBarBazToken",
    }
    assert response.json() == {"headers": merged}
def get_profile(request: ScraperRequest, client: httpx.Client = DEFAULT_CLIENT):
    """Fetch the Ordoclic profile matching the request URL.

    Returns the decoded JSON profile, None on an HTTP error status, or
    False on a timeout (callers can distinguish the two failure modes).
    """
    slug = request.get_url().rsplit("/", 1)[-1]
    prof = request.get_url().rsplit("/", 2)[-2]
    # Professionals and public entities live on different API endpoints.
    if prof in ["pharmacien", "medecin"]:  # pragma: no cover
        base_url = f"https://api.ordoclic.fr/v1/professionals/profile/{slug}"
    else:
        base_url = f"https://api.ordoclic.fr/v1/public/entities/profile/{slug}"
    try:
        r = client.get(base_url)
        r.raise_for_status()
    except httpx.TimeoutException:
        logger.warning(f"request timed out for center: {base_url}")
        return False
    except httpx.HTTPStatusError as err:  # was `hex`, shadowing the builtin
        logger.warning(f"{base_url} returned error {err.response.status_code}")
        return None
    return r.json()
def test_header_merge_conflicting_headers():
    """A per-request header overrides the same header set on the client."""
    url = "http://example.org/echo_headers"

    with Client(dispatch=MockDispatch(), headers={"X-Auth-Token": "FooBar"}) as client:
        response = client.get(url, headers={"X-Auth-Token": "BazToken"})

    assert response.status_code == 200
    expected = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "host": "example.org",
        "user-agent": f"python-httpx/{__version__}",
        "x-auth-token": "BazToken",
    }
    assert response.json() == {"headers": expected}
def getSlots(entityId, medicalStaffId, reasonId, start_date, end_date,
             client: httpx.Client = DEFAULT_CLIENT):
    """Query Ordoclic for available slots for a staff member and reason.

    Raises httpx.HTTPStatusError on a non-2xx response; returns the
    decoded JSON payload otherwise.
    """
    base_url = 'https://api.ordoclic.fr/v1/solar/slots/availableSlots'
    # NOTE(review): dateEnd carries start-of-day (T00:00:00) while dateStart
    # carries end-of-day (T23:59:59) — the time suffixes look swapped
    # relative to the parameter names. Confirm against the Ordoclic API
    # before changing anything.
    payload = {
        "entityId": entityId,
        "medicalStaffId": medicalStaffId,
        "reasonId": reasonId,
        "dateEnd": f"{end_date}T00:00:00.000Z",
        "dateStart": f"{start_date}T23:59:59.000Z"
    }
    headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
    r = client.post(base_url, data=json.dumps(payload), headers=headers)
    r.raise_for_status()
    return r.json()
def get_reasons(entityId, client: httpx.Client = DEFAULT_CLIENT, request: ScraperRequest = None):
    """Fetch booking motives for an Ordoclic entity.

    Returns decoded JSON, or None on timeout / HTTP error status.
    Request counters are bumped when a ScraperRequest is supplied.
    """
    base_url = ORDOCLIC_API.get("motives").format(entityId=entityId)
    if request:
        request.increase_request_count("motives")
    try:
        r = client.get(base_url)
        r.raise_for_status()
    except httpx.TimeoutException:
        logger.warning(f"request timed out for center: {base_url}")
        if request:
            request.increase_request_count("time-out")
        return None
    except httpx.HTTPStatusError as err:  # was `hex`, shadowing the builtin
        logger.warning(f"{base_url} returned error {err.response.status_code}")
        if request:
            request.increase_request_count("error")
        return None
    return r.json()
def _get_anime_info(anilist_id: int) -> Any: """获取动画信息 :param anilist_id: id :return: dict """ # Define our query variables and values that will be used in the query request variables = {"id": anilist_id} url = "https://trace.moe/anilist/" transport = HTTPTransport(verify=False, retries=3) with Client(timeout=10.0, transport=transport) as client: return client.post(url, json={ "query": ANIME_INFO_QUERY, "variables": variables }).json()["data"]["Media"]
class Requests:
    """Thin wrapper around an httpx Client for issuing POST requests."""

    def __init__(self):
        # One client instance is reused for every request made via this object.
        self._client = Client()

    def post(
        self,
        *,
        url: str,
        params: Optional[Dict] = None,
        body: Optional[Dict] = None,
        data: Optional[Dict] = None,
        files: Optional[Dict] = None,
    ) -> Response:
        """Send a POST request; `body` is serialized as the JSON payload."""
        return self._client.post(
            url=url,
            params=params,
            json=body,
            data=data,
            files=files,
        )
async def test_host_with_non_default_port_in_url():
    """
    If the URL includes a non-default port, then it should be included in
    the Host header.
    """
    client = Client(dispatch=MockDispatch())

    response = await client.get("http://*****:*****@example.org:123/echo_headers")

    assert response.status_code == 200
    expected = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "host": "example.org:123",
        "user-agent": f"python-httpx/{__version__}",
        "authorization": "Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
    }
    assert response.json() == {"headers": expected}
def get_next_slot_date(
    center_id: str,
    consultation_reason_name: str,
    start_date: str,
    client: httpx.Client = DEFAULT_CLIENT,
    request: ScraperRequest = None,
) -> Optional[str]:
    """Return the first available physical slot datetime for a Maiia center.

    Returns None when the request fails or when no slot is advertised.
    """
    url = MAIIA_API.get("next_slot").format(
        center_id=center_id,
        consultation_reason_name=consultation_reason_name,
        start_date=start_date,
    )
    if request:
        request.increase_request_count("next-slots")
    try:
        r = client.get(url)
        r.raise_for_status()
    except httpx.TimeoutException:
        # Added for consistency with the sibling scrapers, which treat
        # timeouts explicitly instead of letting them escape to the caller.
        logger.warning(f"request timed out for center: {url}")
        return None
    except httpx.HTTPStatusError as hex:
        logger.warning(f"{url} returned error {hex.response.status_code}")
        return None
    result = r.json()
    # .get() collapses the membership test + lookup; yields None when absent.
    return result.get("firstPhysicalStartDateTime")
def create_quiz_api(token: str, *, async_mode: bool = False):
    """
    Creates and returns a `QuizAPI` object with an empty session

    Example:
    ```py
    quiz_api = create_quiz_api("token")
    quiz = quiz_api.get_quiz(category="linux")
    print(quiz)
    ```

    Async Example:
    ```py
    quiz_api = create_quiz_api("token", async_mode=True)
    quiz = await quiz_api.get_quiz(category="linux")
    print(quiz)
    ```
    """
    # FIX: the async example previously showed async_mode=False, which
    # would have produced a sync session and made the `await` fail.
    headers = {'X-Api-Key': token}
    # Choose the session type matching the requested mode.
    session = AsyncClient(headers=headers) if async_mode else Client(headers=headers)
    return QuizAPI(session=session)
def test_client_header():
    """
    A header configured on the Client is sent with every request.
    """
    headers = {"Example-Header": "example-value"}

    with Client(dispatch=MockDispatch(), headers=headers) as client:
        response = client.get("http://example.org/echo_headers")

    assert response.status_code == 200
    expected = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "example-header": "example-value",
        "host": "example.org",
        "user-agent": f"python-httpx/{__version__}",
    }
    assert response.json() == {"headers": expected}
def get_organization_slug(
    slug: str, client: httpx.Client = DEFAULT_CLIENT, request: ScraperRequest = None
) -> Optional[dict]:
    """Fetch an AvecMonDoc organization by its slug.

    Returns decoded JSON, or None on timeout / HTTP error status.
    Request counters are bumped when a ScraperRequest is supplied.
    """
    url = str(AVECMONDOC_API.get("get_organization_slug", "")).format(slug=slug)
    try:
        r = client.get(url)
        r.raise_for_status()
    except httpx.TimeoutException:
        logger.warning(f"request timed out for center: {url} (get_slug)")
        if request:
            request.increase_request_count("time-out")
        return None
    except httpx.HTTPStatusError as err:  # was `hex`, shadowing the builtin
        # `r` is bound here: raise_for_status() runs only after client.get()
        # has returned a response.
        logger.warning(f"{url} returned error {err.response.status_code}")
        logger.warning(r.content)
        if request:
            request.increase_request_count("error")
        return None
    if request:
        request.increase_request_count("cabinets")
    return r.json()
async def test_http2_settings_in_handshake(backend):
    """The client advertises the expected HTTP/2 settings exactly once."""
    backend = MockHTTP2Backend(app=app, backend=backend)

    async with Client(backend=backend, http2=True) as client:
        await client.get("http://example.org")

    conn = backend.server.conn
    assert isinstance(conn, h2.connection.H2Connection)

    expected_settings = {
        SettingCodes.HEADER_TABLE_SIZE: 4096,
        SettingCodes.ENABLE_PUSH: 0,
        SettingCodes.MAX_CONCURRENT_STREAMS: 100,
        SettingCodes.INITIAL_WINDOW_SIZE: 65535,
        SettingCodes.MAX_FRAME_SIZE: 16384,
        SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
        # This one's here because h2 helpfully populates remote_settings
        # with default values even if the peer doesn't send the setting.
        SettingCodes.ENABLE_CONNECT_PROTOCOL: 0,
    }
    assert dict(conn.remote_settings) == expected_settings

    # We don't expect the ENABLE_CONNECT_PROTOCOL to be in the handshake
    expected_settings.pop(SettingCodes.ENABLE_CONNECT_PROTOCOL)

    assert len(backend.server.settings_changed) == 1
    changed = backend.server.settings_changed[0]
    assert isinstance(changed, h2.events.RemoteSettingsChanged)
    assert len(changed.changed_settings) == len(expected_settings)
    for code, setting in changed.changed_settings.items():
        assert isinstance(setting, h2.settings.ChangedSetting)
        assert setting.new_value == expected_settings[code]
def request_new_grant_with_post_scb_special(
        self, url: str, data, grant_name: str, client: httpx.Client) -> Tuple[str, int]:
    """POST a grant request with SCB-specific headers and extract the token.

    Returns (token, expiresIn). Raises InvalidGrantRequest on an error
    response and GrantNotProvided when the named grant is missing.
    """
    with client:
        headers = {
            "Content-Type": "application/json",
            "resourceOwnerId": self.client_id,
            "requestUId": uuid.uuid4().hex,
            "accept-language": "EN",
        }
        response = client.post(url, json=data, headers=headers)

        if response.is_error:
            # As described in https://tools.ietf.org/html/rfc6749#section-5.2
            raise InvalidGrantRequest(response)

        payload = response.json().get("data")
        token = payload.get(grant_name)
        if not token:
            raise GrantNotProvided(grant_name, payload)
        return token, payload.get("expiresIn")
def get_reasons(
    organization_id: int, doctor_id: int, client: httpx.Client = DEFAULT_CLIENT, request: ScraperRequest = None
) -> Optional[list]:
    """Fetch consultation reasons for a doctor at an AvecMonDoc organization.

    Returns decoded JSON, or None on timeout / HTTP error status.
    """
    # FIX: the original formatted the URL with the *builtin* `id` function
    # (`.format(id=id)`), which injects "<built-in function id>" into the
    # URL whenever the template has an {id} placeholder.
    # TODO(review): confirm whether {id} expects the organization or the
    # doctor id — the organization id is assumed here.
    url = AVECMONDOC_API.get("get_reasons", "").format(id=organization_id)
    payload = {"params": json.dumps({"organizationId": organization_id, "doctorId": doctor_id})}
    try:
        r = client.get(url, params=payload)
        r.raise_for_status()
    except httpx.TimeoutException:
        logger.warning(f"request timed out for center: {url} (get_reasons)")
        if request:
            request.increase_request_count("time-out")
        return None
    except httpx.HTTPStatusError as err:  # was `hex`, shadowing the builtin
        logger.warning(f"{url} returned error {err.response.status_code}")
        logger.warning(r.content)
        if request:
            request.increase_request_count("error")
        return None
    if request:
        request.increase_request_count("motives")
    return r.json()
async def test_host_with_auth_and_port_in_url():
    """
    The Host header should only include the hostname, or hostname:port
    (for non-default ports only). Any userinfo or default port should
    not be present.
    """
    client = Client(dispatch=MockDispatch())

    response = await client.get("http://*****:*****@example.org:80/echo_headers")

    assert response.status_code == 200
    expected = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate, br",
        "connection": "keep-alive",
        "host": "example.org",
        "user-agent": f"python-httpx/{__version__}",
        "authorization": "Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
    }
    assert response.json() == {"headers": expected}
def get_slots(
    campagneId: str,
    optionId: str,
    start_date: str,
    client: httpx.Client = DEFAULT_CLIENT,
    request: ScraperRequest = None,
) -> dict:
    """Fetch available Mapharma slots for a campaign/option from start_date.

    Returns decoded JSON, or an empty dict on timeout / HTTP error status.
    """
    base_url = MAPHARMA_API.get("slots").format(campagneId=campagneId, start_date=start_date, optionId=optionId)
    if request:
        request.increase_request_count("slots")
    try:
        r = client.get(base_url)
        r.raise_for_status()
    except httpx.TimeoutException as err:  # was `hex`, shadowing the builtin
        logger.warning(f"{base_url} timed out {err}")
        return {}
    except httpx.HTTPStatusError as err:
        logger.warning(f"{base_url} returned error {err.response.status_code}")
        return {}
    return r.json()
def post_method(url: str, postdata=None, postjson=None, headers: dict = None, timeout=5,
                max_retries=5, c: httpx.Client = None):
    """POST with retries.

    timeout: seconds per attempt (default 5); pass `None` to disable.
    max_retries: maximum attempts (default 5); 0 retries forever.
    c: an httpx.Client-like object; when None, module-level httpx.post is used.

    Exits the process (sys.exit) when every attempt fails.
    """
    attempt = 1
    while (attempt <= max_retries) or (max_retries == 0):
        try:
            if c is not None:
                res = c.post(url, data=postdata, json=postjson, headers=headers, timeout=timeout)
            else:
                res = httpx.post(url, data=postdata, json=postjson, headers=headers, timeout=timeout)
        except Exception as e:
            # Log the failure, back off briefly, then retry.
            attempt = attempt + 1
            print(sys._getframe().f_code.co_name + ": " + str(e))
            time.sleep(1)
        else:
            # FIX: the original fell out of the loop and did `return res`,
            # which raised NameError on total failure — masked by a broad
            # `except` that then called sys.exit. Return directly on success.
            return res
    sys.exit(sys._getframe().f_code.co_name + ": " + "Max retries exceeded")
def get_paged(
    url: str,
    # NOTE(review): annotated with the constant MAIIA_LIMIT — probably
    # meant `limit: int = MAIIA_LIMIT`; confirm with callers.
    limit: MAIIA_LIMIT,
    client: httpx.Client = DEFAULT_CLIENT,
    request: ScraperRequest = None,
    request_type: str = None,
) -> dict:
    """Fetch all pages of a Maiia paged endpoint and merge them.

    Returns {"items": [...], "total": n}. Stops early on HTTP errors,
    JSON decode errors, an empty page, or once all advertised items
    have been collected.
    """
    result = dict()
    result["items"] = []
    result["total"] = 0
    page = 0
    loops = 0
    # NOTE(review): `loops` counts pages while result["total"] counts items,
    # so this condition acts as a generous safety bound against endless
    # paging rather than an exact page count — confirm this is intentional.
    while loops <= result["total"]:
        base_url = f"{url}&limit={limit}&page={page}"
        if request:
            request.increase_request_count(request_type)
        try:
            r = client.get(base_url)
            r.raise_for_status()
        except httpx.HTTPStatusError as hex:
            logger.warning(
                f"{base_url} returned error {hex.response.status_code}")
            break
        try:
            payload = r.json()
        except json.decoder.JSONDecodeError as jde:
            logger.warning(f"{base_url} raised {jde}")
            break
        result["total"] = payload["total"]
        # An empty page means the server has nothing more to give.
        if not payload["items"]:
            break
        for item in payload.get("items"):
            result["items"].append(item)
        # Stop as soon as every advertised item has been collected.
        if len(result["items"]) >= result["total"]:
            break
        page += 1
        loops += 1
    return result
async def test_digest_auth_no_specified_qop():
    """Without a server-specified qop, the client omits qop/nc/cnonce."""
    url = "https://example.org/"
    auth = DigestAuth(username="******", password="******")
    client = Client(dispatch=MockDigestAuthDispatch(qop=None))

    response = await client.get(url, auth=auth)
    assert response.status_code == 200

    header = response.json()["auth"]
    assert header.startswith("Digest ")

    fields = [part.strip() for part in header[header.find(" ") :].split(",")]
    digest_data = dict(part.split("=") for part in fields)

    for absent in ("qop", "nc", "cnonce"):
        assert absent not in digest_data

    assert digest_data["username"] == '"tomchristie"'
    assert digest_data["realm"] == '"*****@*****.**"'
    assert len(digest_data["nonce"]) == 64 + 2  # extra quotes
    assert digest_data["uri"] == '"/"'
    assert len(digest_data["response"]) == 64 + 2
    assert len(digest_data["opaque"]) == 64 + 2
    assert digest_data["algorithm"] == "SHA-256"
async def test_digest_auth(algorithm, expected_hash_length, expected_response_length):
    """The digest challenge response carries all fields for the algorithm."""
    url = "https://example.org/"
    auth = DigestAuth(username="******", password="******")
    client = Client(dispatch=MockDigestAuthDispatch(algorithm=algorithm))

    response = await client.get(url, auth=auth)
    assert response.status_code == 200

    header = response.json()["auth"]
    assert header.startswith("Digest ")

    fields = [part.strip() for part in header[header.find(" ") :].split(",")]
    digest_data = dict(part.split("=") for part in fields)

    assert digest_data["username"] == '"tomchristie"'
    assert digest_data["realm"] == '"*****@*****.**"'
    assert "nonce" in digest_data
    assert digest_data["uri"] == '"/"'
    assert len(digest_data["response"]) == expected_response_length + 2  # extra quotes
    assert len(digest_data["opaque"]) == expected_hash_length + 2
    assert digest_data["algorithm"] == algorithm
    assert digest_data["qop"] == "auth"
    assert digest_data["nc"] == "00000001"
    assert len(digest_data["cnonce"]) == 16 + 2
def request(self, method: str, url: str, payload: dict = None) -> Union[dict, None]:
    """Perform an HTTP call with the stored cookies and decode the JSON body.

    GET payloads go out as query params, others as form data. Raises a
    specific Amo* exception for each known error status.
    """
    if payload:
        kwargs = {'params': payload} if method.lower() == 'get' else {'data': payload}
    else:
        kwargs = {}

    with Client() as session:
        resp = getattr(session, method)(url=url, **kwargs, cookies=self.cookies)

        if resp.status_code == 200:
            return resp.json()

        # Map each known error status to its exception class; anything
        # unrecognised falls through to AmoUnknownError.
        errors = {
            400: AmoBadRequest,
            401: AmoAuthError,
            402: AmoPaymentRequired,
            403: AmoForbidden,
            404: AmoPageNotFoundError,
            429: AmoTooManyRequests,
            500: AmoInternalError,
        }
        raise errors.get(resp.status_code, AmoUnknownError)
def __init__(self, host: str, client: Client = None) -> None:
    """Store the target host and an httpx Client (a fresh one when omitted)."""
    self.__host = host
    self.__client = client or Client()