def _parse_raw_request(cls, raw_request: bytes) -> PreparedRequest:
    """Reconstruct a requests PreparedRequest from raw HTTP wire bytes.

    AWS4Auth is built to work with ``requests`` and expects a
    ``PreparedRequest``, so we parse the raw request into one.  The
    scheme/host/port are irrelevant to the caller, hence the dummy origin.

    :param raw_request: a complete raw HTTP request (request line, headers,
        optional body) as UTF-8 bytes.
    :returns: an equivalent ``PreparedRequest``.
    """
    headers, body = raw_request.decode('utf-8').split('\r\n\r\n', 1)
    # Normalise the body: empty -> None; strip one trailing CRLF if present.
    # (Fix: removed a dead "body = None" pre-assignment that was always
    # overwritten by the split above.)
    if len(body) == 0:
        body = None
    elif body.endswith('\r\n'):
        body = body[:-2]
    # NOTE: folded (multi-line) headers are obsolete and not generated by us,
    # so each header is assumed to live on its own line.
    method_et_al, headers = headers.split('\r\n', 1)
    headers_as_dict: Mapping[str, str] = {
        k.strip(): v.strip()
        for k, v in (h.split(':', 1) for h in headers.split('\r\n'))
    }
    # Request line split on single spaces; assumes a well-formed request line.
    method, path_et_al, version = method_et_al.split(' ', 2)
    # Host, port, and scheme do not matter here; use a placeholder origin.
    url = 'https://nope/' + path_et_al
    req = PreparedRequest()
    req.prepare_method(method)
    req.prepare_url(url, {})
    req.prepare_headers(headers_as_dict)
    req.prepare_body(data=body, files=None)
    # Don't call prepare_content_length: Content-Length was already present
    # in the parsed headers.
    return req
def make_url(api_key, location_name):
    """Build a Mapzen Search API URL for the given location.

    Arguments:
        api_key (str): the Mapzen API key obtained at registration,
            e.g. 'MY_API_KEY'.
        location_name (str): human-readable name of the location to
            geocode, e.g. 'Stanford University'.

    Returns:
        str: a URL formatted per the Mapzen Search API spec, e.g.
        https://search.mapzen.com/v1/search?api_key=MY_API_KEY&text=Stanford+University
    """
    query_params = {'text': location_name, 'api_key': api_key}
    # Delegate query-string encoding to requests' URL preparation.
    prepared = PreparedRequest()
    prepared.prepare_url(url=BASE_ENDPOINT, params=query_params)
    return prepared.url
def test_get_cutout_success(self, mock_session):
    """get_cutout should decompress the mocked blosc payload back into the original array."""
    resolution = 0
    x_range = [20, 40]
    y_range = [50, 70]
    z_range = [30, 50]
    time_range = [10, 25]
    id_list = []
    url_prefix = 'https://api.theboss.io'
    auth = 'mytoken'
    # Session is mocked: prepare_request returns a bare PreparedRequest,
    # send returns a canned 200 response carrying compressed data.
    fake_prepped_req = PreparedRequest()
    fake_prepped_req.headers = {}
    mock_session.prepare_request.return_value = fake_prepped_req
    # Random uint16 volume sized to match the requested t/z/y/x ranges.
    data = numpy.random.randint(0, 3000, (15, 20, 20, 20), numpy.uint16)
    compressed_data = blosc.compress(data, typesize=16)
    fake_response = Response()
    fake_response.status_code = 200
    fake_response._content = compressed_data
    mock_session.send.return_value = fake_response
    send_opts = {}
    actual = self.vol.get_cutout(self.chan, resolution, x_range, y_range,
                                 z_range, time_range, id_list, url_prefix,
                                 auth, mock_session, send_opts)
    numpy.testing.assert_array_equal(data, actual)
def fetch(self, entry_url: str) -> JobsList:
    """Scrape job postings starting at entry_url, following ?page=N pagination.

    Keeps requesting successive pages until a page yields no jobs.

    NOTE(review): the first pass scrapes entry_url itself, then page=1 is
    requested explicitly — confirm the site doesn't serve the same listings
    twice (entry_url may already be page 1).
    """
    self.jobs = JobsList()
    page_buffer = []
    # Seed the buffer from the entry page; per-job failures are logged and skipped.
    for job_link in self.get_jobs_list(entry_url):
        try:
            page_buffer.append(self.get_job(job_link))
        except Exception as e:
            print("Error Processing %s %s " % (job_link, e))
    page = 1
    # Loop while the previous page produced at least one job.
    while len(page_buffer) > 0:
        self.jobs.extend(page_buffer)
        page_buffer = []
        # Use requests' URL preparation to append the page query parameter.
        prep_url = PreparedRequest()
        prep_url.prepare(url=entry_url, params={'page': page})
        next_page_url = prep_url.url
        for job_link in self.get_jobs_list(next_page_url):
            try:
                page_buffer.append(self.get_job(job_link))
            except Exception as e:
                print("Error Processing %s %s " % (job_link, e))
        print("Scraped page %s" % page)
        page += 1
    return self.jobs
def test_get_bounding_box_success(self, mock_session, mock_resp):
    """get_bounding_box should return the JSON payload of a 200 response unchanged."""
    resolution = 0
    id = 44444
    bb_type = 'loose'
    url_prefix = 'https://api.theboss.io'
    auth = 'mytoken'
    send_opts = {}
    # Session is mocked: prepare_request returns a bare PreparedRequest,
    # send returns the mocked 200 response below.
    fake_prepped_req = PreparedRequest()
    fake_prepped_req.headers = {}
    mock_session.prepare_request.return_value = fake_prepped_req
    mock_session.send.return_value = mock_resp
    mock_resp.status_code = 200
    # The service returns the bounding box as JSON; expect it back verbatim.
    mock_resp.json.return_value = expected = {
        'x_range': [0, 10],
        'y_range': [0, 10],
        'z_range': [0, 10],
        't_range': [0, 10]
    }
    actual = self.vol.get_bounding_box(self.anno_chan, resolution, id,
                                       bb_type, url_prefix, auth,
                                       mock_session, send_opts)
    self.assertEqual(expected, actual)
def add_auth(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
    """Sign the request by merging auth params into the body (POST) or query (GET).

    A copy of the request (``rr``) rebuilt with the effective host — an
    explicit Host header takes precedence over the URL's netloc — is what
    gets passed to ``update_params`` for signing; the original request ``r``
    is mutated and returned.
    """
    rr = r.copy()
    url = urlparse(r.url)
    # Prefer an explicit Host header over the URL's netloc when signing.
    if 'Host' in r.headers:
        netloc = r.headers['Host'].decode('utf-8')
    else:
        netloc = url.netloc
    rr.url = urlunparse((url.scheme, netloc, url.path, url.params,
                         url.query, url.fragment))
    if r.method == 'POST':
        if r.body:
            # Body may be bytes or str; normalise to a dict of form fields.
            if isinstance(r.body, bytes):
                body = dict(parse_qsl(r.body.decode("utf-8").strip()))
            elif isinstance(r.body, str):
                body = dict(parse_qsl(r.body.strip()))
            # Re-encode the body with the signing params merged in.
            r.body = urlencode(self.update_params(rr, body))
            new_headers = r.headers
            new_headers[
                'Content-Type'] = 'application/x-www-form-urlencoded; charset=utf-8'
            r.headers = new_headers
    elif r.method == 'GET':
        url = urlparse(r.url)
        if url.query:
            # Rebuild the query string with the signing params merged in.
            new_query = urlencode(
                self.update_params(rr, dict(parse_qsl(url.query))))
            r.url = urlunparse((url.scheme, url.netloc, url.path,
                                url.params, new_query, url.fragment))
    return r
def test_curl_options_for_post_with_some_file_data(tmpdir):
    """POST with a py.path file as data should produce upload-style curl options."""
    p = tmpdir.join("test.txt")
    p.write("content")
    prepared_request = PreparedRequest()
    prepared_request.prepare(
        url="http://somefakeurl",
        method="POST",
        data=p,
    )
    curl_request = CURLRequest(prepared_request)
    curl_options = curl_request.options
    # File data carries no explicit Content-Length header here.
    expected_headers = []
    assert len(curl_options) == 7
    assert curl_options[pycurl.URL] == "http://somefakeurl/"
    assert curl_options[pycurl.SSL_VERIFYHOST] == 0
    assert curl_options[pycurl.SSL_VERIFYPEER] == 0
    assert curl_options[pycurl.UPLOAD] is True
    assert curl_options[pycurl.CUSTOMREQUEST] == "POST"
    assert curl_options[pycurl.HTTPHEADER] == expected_headers
    # We actually call the function to test that it reads
    # the expected bytes
    assert curl_options[pycurl.READFUNCTION]() == "content"
def test_get_ids_in_region_success(self, mock_session, mock_resp):
    """get_ids_in_region should convert the service's string ids into ints."""
    resolution = 0
    x_range = [0, 100]
    y_range = [10, 50]
    z_range = [20, 42]
    t_range = [0, 1]
    url_prefix = 'https://api.theboss.io'
    auth = 'mytoken'
    send_opts = {}
    # Session is mocked: prepare_request returns a bare PreparedRequest,
    # send returns the mocked 200 response below.
    fake_prepped_req = PreparedRequest()
    fake_prepped_req.headers = {}
    mock_session.prepare_request.return_value = fake_prepped_req
    mock_session.send.return_value = mock_resp
    mock_resp.status_code = 200
    # Service returns ids as strings; the method should cast them to ints.
    mock_resp.json.return_value = {'ids': ['1', '10']}
    actual = self.vol.get_ids_in_region(
        self.anno_chan, resolution,
        x_range, y_range, z_range, t_range,
        url_prefix, auth, mock_session, send_opts)
    expected = [1, 10]
    self.assertEqual(expected, actual)
def test__call__(mocker):
    """Attaching the auth object during prepare() should set a Bearer header."""
    auth = SpotifyAPIAuth()
    # Stub out the token fetch so prepare() has no network side effects.
    mocker.patch.object(auth, "_set_tokens")
    auth.tokens = {"access_token": "xxxxx"}
    req = PreparedRequest()
    req.prepare(method="GET", url="https://spotify.com", auth=auth)
    assert req.headers["authorization"] == "Bearer xxxxx"
def get_lookup_result(self, threat, atom_type, hashkey_only) -> list:
    """Look up a single threat atom via the Datalake API.

    :param threat: the atom value to look up.
    :param atom_type: the atom's type.
    :param hashkey_only: whether to return only the hashkey.
    :returns: the API response.
    """
    query_params = {
        'atom_value': threat,
        'atom_type': atom_type,
        'hashkey_only': hashkey_only,
    }
    # Let requests' URL preparation encode the query string onto the base URL.
    prepared = PreparedRequest()
    prepared.prepare_url(self.url, query_params)
    auth_header = {'Authorization': self.tokens[0]}
    return self.datalake_requests(prepared.url, 'get', headers=auth_header)
def test_pool_send_configures_handler_correctly():
    """pool.send should apply the request's curl options to the handler and build the response."""
    prepared_request = PreparedRequest()
    prepared_request.prepare(url="http://somefakeurl", method="GET", headers={})
    curl_request = CURLRequest(prepared_request)
    curl_handler = FakeCurlHandler()
    # Configure the handler to return some body and headers
    curl_handler.body = b"somebodydata"
    curl_handler.http_status = 200
    # Header lines are encoded as iso-8859-1 per the HTTP standard.
    curl_handler.header_lines = [
        "HTTP/1.1 200 OK\n".encode("iso-8859-1"),
        "Content-Language: en-US\n".encode("iso-8859-1"),
        "Cache-Control: no-cache\n".encode("iso-8859-1"),
    ]
    pool = CURLHandlerPool(curl_factory=lambda: curl_handler)
    response = pool.send(curl_request)
    assert curl_handler.performed
    assert response.body.getvalue() == b"somebodydata"
    assert response.http_code == 200
    assert response.headers == {
        "Cache-Control": "no-cache",
        "Content-Language": "en-US",
    }
    # Assert that the curl options from the requests were set to the handler
    for opt, val in curl_request.options.items():
        assert curl_handler.options[opt] == val
def test_cutout_get_success(self, mock_session):
    """cutout_get should unpack the blosc-packed array from a 200 response."""
    resolution = 0
    x_range = '20:40'
    y_range = '50:70'
    z_range = '30:50'
    time_range = '10:25'
    url_prefix = 'https://api.theboss.io'
    auth = 'mytoken'
    # Session is mocked: prepare_request returns a bare PreparedRequest,
    # send returns a canned 200 response carrying packed array data.
    fake_prepped_req = PreparedRequest()
    fake_prepped_req.headers = {}
    mock_session.prepare_request.return_value = fake_prepped_req
    # Random uint16 volume packed with blosc, as the service would return it.
    data = numpy.random.randint(0, 3000, (15, 20, 20, 20), numpy.uint16)
    compressed_data = blosc.pack_array(data)
    fake_response = Response()
    fake_response.status_code = 200
    fake_response._content = compressed_data
    mock_session.send.return_value = fake_response
    send_opts = {}
    actual = self.vol.cutout_get(
        self.chan, resolution, x_range, y_range, z_range, time_range,
        url_prefix, auth, mock_session, send_opts)
    numpy.testing.assert_array_equal(data, actual)
def __call__(self, request: PreparedRequest) -> PreparedRequest:
    """Stamp provider identification and token auth headers onto the request."""
    package_name, provider_version = _get_provider_info()
    extra_headers = {
        "User-Agent": f"{package_name}-v{provider_version}",
        "Content-Type": "application/json",
        "Authorization": f"Token {self.token}",
    }
    request.headers.update(extra_headers)
    return request
def test_curl_response_with_cookies():
    """Set-Cookie headers should end up in the requests response's cookie jar."""
    prepared_request = PreparedRequest()
    prepared_request.prepare(url="http://somefakeurl", method="GET", headers={})
    curl_request = CURLRequest(prepared_request)
    curl_response = CURLResponse(curl_request)
    curl_response.http_code = 200
    header_lines = [
        "HTTP/1.1 200 OK\n",
        "Content-Language: en-US\n",
        "Cache-Control: no-cache\n",
        "Set-Cookie: foo=123; SameSite=None; Secure; Max-Age=2592000\n",
        "Set-Cookie: bar=abc; HttpOnly\n",
    ]
    for header_line in header_lines:
        # We provide lines encoded as defined in http standard
        curl_response.add_header_from_raw_line(
            header_line.encode("iso-8859-1"))
    req_response = curl_response.to_requests_response()
    # Both cookies are parsed; their attributes (Secure, HttpOnly, ...) are
    # not asserted here, only name/value.
    assert len(req_response.cookies) == 2
    assert req_response.cookies.get("foo") == "123"
    assert req_response.cookies.get("bar") == "abc"
def test_adapter_performs_a_successful_request_through_proxy():
    """The adapter should route the request via the pool registered for its proxy URL."""
    request = PreparedRequest()
    request.prepare(url="http://somefakeurl", method="GET", headers={})
    proxies = {
        "http": "http://localhost:8080",
        "https": "https://localhost:8081",
    }
    header_lines = [
        b"HTTP/1.1 200 OK\n",
        b"Content-Language: en-US\n",
    ]
    # Register a fake pool keyed on (proxy URL, request URL).
    pool = FakePool()
    pool.add_response(200, b"data obtained through proxy", header_lines)
    pool_provider = FakePoolProvider()
    pool_provider.add_pool_for_proxied_url("http://localhost:8080",
                                           request.url, pool)
    adapter = CURLAdapter(
        pool_provider_factory=lambda *args, **kwargs: pool_provider)
    response = adapter.send(request, proxies=proxies)
    assert response.status_code == 200
    assert response.text == "data obtained through proxy"
    assert response.headers == {"Content-Language": "en-US"}
def main(query, pages, city):
    """Scrape phone-listing links for `query` in `city` and write them to a CSV.

    Drills down the site's category tree (Electronics -> Accessories ->
    Phones) from the search results, then collects listing links from the
    first `pages` result pages.
    """
    search_url = urljoin(urljoin(BASE_URL, city + '/'), f'q-{query}')

    def follow_category(url, data_id):
        # Fetch the page and return the href of the category anchor
        # identified by its data-id attribute.
        soup = get_soup(requests.get(url))
        return soup.find('a', attrs={'data-id': data_id}).attrs['href']

    electronics_link = follow_category(search_url, '37')
    accessories_link = follow_category(electronics_link, '44')
    phones_link = follow_category(accessories_link, '85')

    all_links = []
    prepared = PreparedRequest()
    for page_number in range(1, pages + 1):
        # prepare_url appends the page parameter to the category URL.
        prepared.prepare_url(phones_link, params={'page': page_number})
        all_links.extend(get_links_on_page(prepared.url))

    write_to_csv_file(f'{query}.csv', all_links)
def test_curl_options_for_post_put_with_some_string_data(
        http_method, data, expected_data):
    """Parametrized: POST/PUT with string data sets upload options plus Content-Length."""
    prepared_request = PreparedRequest()
    prepared_request.prepare(
        url="http://somefakeurl",
        method=http_method,
        data=data,
    )
    curl_request = CURLRequest(prepared_request)
    curl_options = curl_request.options
    # String data has a known length, so a Content-Length header is expected.
    expected_headers = [
        f"Content-Length: {len(data)}",
    ]
    assert len(curl_options) == 7
    assert curl_options[pycurl.URL] == "http://somefakeurl/"
    assert curl_options[pycurl.SSL_VERIFYHOST] == 0
    assert curl_options[pycurl.SSL_VERIFYPEER] == 0
    assert curl_options[pycurl.UPLOAD] is True
    assert curl_options[pycurl.CUSTOMREQUEST] == http_method
    assert curl_options[pycurl.HTTPHEADER] == expected_headers
    # We actually call the function to test that it reads
    # the expected bytes
    assert curl_options[pycurl.READFUNCTION]() == expected_data
def send(self,
         request: PreparedRequest,
         stream: bool = False,
         timeout: Optional[HTTPTimeout] = None,
         verify: Union[bool, str] = True,
         cert: Optional[HTTPClientCertificate] = None,
         proxies: Optional[Mapping[str, str]] = None) -> Response:
    """Send a prepared HTTP request to the metadata API server.

    The request URL is rewritten so that only its path/query/fragment are
    kept and re-rooted under this adapter's base URL; auth preparation is
    re-run with no explicit auth before delegating to the parent adapter.

    Fix: the docstring previously sat *after* the first statement, where it
    was a no-op string expression rather than the method docstring.
    """
    (_, _, path, query, fragment) = urlsplit(request.url or '')
    # Re-root the path/query/fragment under the configured base URL,
    # discarding whatever scheme/host the caller supplied.
    request.prepare_url(
        urljoin(
            self._base_url,
            urlunsplit(('', '', path, query, fragment)),
        ),
        {},
    )
    # Re-run auth preparation with no explicit auth for the rewritten URL.
    request.prepare_auth(None, self._base_url)
    return super(MetadataAPIAdapter, self).send(
        request,
        stream=stream,
        timeout=timeout,
        verify=verify,
        cert=cert,
        proxies=proxies,
    )
def get_threats(self, query_hash: str, limit=10, response_format="application/json") -> dict:
    """Retrieve threats for a previously-submitted query hash.

    :param query_hash: hash identifying the query whose results to fetch.
    :param limit: maximum number of results to return.
    :param response_format: MIME type requested via the Accept header.
    :returns: the API response.
    """
    # Let requests' URL preparation encode the limit parameter.
    prepared = PreparedRequest()
    prepared.prepare_url(self.url + query_hash, {'limit': limit})
    request_headers = {
        'Authorization': self.tokens[0],
        'Accept': response_format,
    }
    return self.datalake_requests(prepared.url, 'get', headers=request_headers)
def __call__(
        self, request: requests.PreparedRequest) -> requests.PreparedRequest:
    """Run the parent auth flow, then force client_id into the form body."""
    request = super().__call__(request)
    form = furl.Query(request.body)
    # set() replaces any existing client_id instead of appending a duplicate.
    form.set([("client_id", self.client_id)])
    request.prepare_body(form.params, files=None)
    return request
def sub_link(self):
    """Build this user's subscription URL (token = base64 of the username)."""
    encoded_token = base64.b64encode(self.username.encode()).decode()
    prepared = PreparedRequest()
    # prepare_url handles query-string encoding of the token parameter.
    prepared.prepare_url(settings.HOST + "/server/subscribe/",
                         {"token": encoded_token})
    return prepared.url
def sub_link(self):
    '''Build this user's subscription URL (token = base64 of the username).'''
    p = PreparedRequest()
    token = base64.b64encode(self.username.encode()).decode()
    # Fix: join with exactly one slash regardless of whether settings.HOST
    # carries a trailing '/'. Previously 'HOST' + 'server/subscribe/' produced
    # '<host>server/subscribe/' when HOST had no trailing slash.
    url = settings.HOST.rstrip('/') + '/server/subscribe/'
    params = {'token': token}
    p.prepare_url(url, params)
    return p.url
def get_query(self, data=None):
    """Build the encoded URL for self.target from `data`, cache it on self.query, and return it."""
    params = build_req_data(data)
    prepared = PreparedRequest()
    # prepare_url merges the params into the target URL, yielding the
    # fully encoded form.
    prepared.prepare_url(self.target, params)
    self.query = prepared.url
    return self.query
def setUp(self):
    """Create a canned PreparedRequest fixture used by the matcher tests."""
    # Same URL plus an extra query parameter, for order-independence tests.
    self.alt_url = ('http://example.com/path/to/end/point?query=string'
                    '&foo=bar')
    # Attributes are assigned directly rather than via prepare(), so the
    # fixture is exactly what each matcher inspects.
    self.p = PreparedRequest()
    self.p.body = 'Foo bar'
    self.p.headers = {'User-Agent': 'betamax/test'}
    self.p.url = 'http://example.com/path/to/end/point?query=string'
    self.p.method = 'GET'
    self.p._cookies = RequestsCookieJar()
def __call__(self, r: PreparedRequest):
    """Attach gateway auth headers from Django settings to the outgoing request."""
    # Read the system admin token from settings and place it in the
    # Authorization header.
    r.headers[
        "Authorization"] = f"Bearer {getattr(settings, 'BCS_APIGW_TOKEN', '')}"
    r.headers["Content-Type"] = "application/json"
    # Forward the user's access token via the BK API gateway header, if any.
    if self.access_token:
        r.headers['X-BKAPI-AUTHORIZATION'] = json.dumps(
            {"access_token": self.access_token})
    return r
def test_http_method(self):
    """http_method is None without a request and echoes the prepared method otherwise."""
    no_request_exc = ZoomAPIException(0, 'Test', None, 'Test message')
    self.assertEqual(no_request_exc.http_method, None)

    req = PreparedRequest()
    req.prepare_method('GET')
    with_request_exc = ZoomAPIException(0, 'Test', req, 'Message')
    self.assertEqual(with_request_exc.http_method, 'GET')
def _add_url_params(self, url: str, extra_params: dict):
    """Return `url` with the instance's params plus `extra_params` appended.

    Entries in `extra_params` override same-named instance params.
    """
    merged_params = {**self.params, **extra_params}
    prepared = PreparedRequest()
    prepared.prepare_url(url, merged_params)
    return prepared.url
def test_use_chunked_upload_is_true_for_streamed_data():
    """Streamed (file-like) request data should enable chunked upload."""
    prepared_request = PreparedRequest()
    # A StringIO body is stream-like, so no length is known up front.
    some_stream_data = six.StringIO("this is some data as string")
    prepared_request.prepare(
        url="http://somefakeurl",
        data=some_stream_data,
    )
    curl_request = CURLRequest(prepared_request)
    assert curl_request.use_chunked_upload is True
def test_token_auth(self):
    """A signed session should inject the OAuth bearer token header."""
    token = {'access_token': '123456789'}
    auth = OAuthTokenAuthentication("client_id", token)
    session = auth.signed_session()
    request = PreparedRequest()
    request.prepare("GET", "https://example.org")
    # Apply the session's auth callable directly to the prepared request.
    session.auth(request)
    assert request.headers == {'Authorization': 'Bearer 123456789'}
def __call__(self, r: P):
    """Authenticate the request with a fresh nonce and register response hooks."""
    # Get nonce and authenticate the Request
    nonce = self.new_nonce()
    self.auth_request(r, nonce)
    # Register hooks: handle_401 re-authenticates on 401 responses,
    # handle_redirect re-signs on redirects.
    r.register_hook('response', self.handle_401)
    r.register_hook('response', self.handle_redirect)
    return r
def __call__(self, r: PreparedRequest):
    """Attach gateway auth headers and append access_token to the query string."""
    # Read the shared gateway token from settings and place it in the
    # Authorization header.
    auth_token = f'Bearer {getattr(settings, "BCS_APIGW_TOKEN", "")}'
    r.headers['Authorization'] = auth_token
    r.headers['Content-Type'] = 'application/json'
    # Append access_token as a GET query parameter on the request URL.
    r.url = update_url_parameters(r.url,
                                  {'access_token': self.access_token})
    return r
def generate_request(method, url, body):
    """
    Generate our own custom request, so we can calculate digest auth.

    :param method: HTTP method name (any case; upper-cased before use).
    :param url: target URL.
    :param body: optional payload; JSON-serialized into the request body.
    :returns: a signed PreparedRequest.
    """
    method = method.upper()
    url = url
    files = []
    json_string = None
    # Headers mimic what the stormpath-flask SDK would send.
    headers = CaseInsensitiveDict({
        'Accept': 'application/json',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'User-Agent': 'stormpath-flask/0.4.4 flask/0.10.1 stormpath-sdk-python/2.4.5 python/2.7.6 Linux/LinuxMint (Linux-3.13.0-37-generic-x86_64-with-LinuxMint-17.1-rebecca)'
    })
    if body:
        # Content-Length must match the JSON-serialized body exactly.
        headers.update({'Content-Length': str(len(json.dumps(body)))})
    params = OrderedDict()
    # API credentials come from the environment; the signer computes the
    # SAuthc1 digest during prepare().
    auth = Sauthc1Signer(
        id=os.environ.get('STORMPATH_API_KEY_ID'),
        secret=os.environ.get('STORMPATH_API_KEY_SECRET'))
    cookies = RequestsCookieJar()
    hooks = {'response': []}
    pr = PreparedRequest()
    if body:
        json_body = json.dumps(body)
    else:
        json_body = None
    pr.prepare(
        method=method.upper(),
        url=url,
        files=files,
        data=json_body,
        json=json_string,
        headers=headers,
        params=params,
        auth=auth,
        cookies=cookies,
        hooks=hooks,
    )
    return pr
def test_cutout_get_failure(self, mock_session):
    """A non-200 response should make cutout_get raise HTTPError."""
    resolution = 0
    x_range = '20:40'
    y_range = '50:70'
    z_range = '30:50'
    time_range = '10:25'
    url_prefix = 'https://api.theboss.io'
    auth = 'mytoken'
    # Session is mocked: prepare_request returns a bare PreparedRequest,
    # send returns a canned 403 response.
    fake_prepped_req = PreparedRequest()
    fake_prepped_req.headers = {}
    mock_session.prepare_request.return_value = fake_prepped_req
    fake_response = Response()
    fake_response.status_code = 403
    mock_session.send.return_value = fake_response
    send_opts = {}
    with self.assertRaises(HTTPError):
        actual = self.vol.cutout_get(
            self.chan, resolution, x_range, y_range, z_range, time_range,
            url_prefix, auth, mock_session, send_opts)
def add_api_key(self, r: P):
    """Insert the API key into the request body under the 'key' field.

    NOTE(review): assumes r.body is already a mutable mapping at this
    point — confirm a prior step parses the body before this runs.
    """
    r.body['key'] = self.api_key
def add_nonce(self, r: P, nonce: str):
    """Insert the nonce into the request body under the 'nonce' field.

    NOTE(review): assumes r.body is already a mutable mapping at this
    point — confirm a prior step parses the body before this runs.
    """
    r.body['nonce'] = nonce
def _download(self, url, params=None, filename=None, data=None):
    """
    Download the URL with GET or POST and the chosen parameters.

    Will call the ``_handle_requests_http_error()`` method if the response
    comes back with an HTTP code other than 200. Returns the response
    object if successful and ``filename`` is not given - if given it will
    save the response to the specified file and return ``None``.

    By default it will send a GET request - if data is given it will send
    a POST request.

    :param url: The URL to download from.
    :type url: str
    :param params: Additional URL parameters.
    :type params: dict
    :param filename: String or file like object. Will download directly
        to the file. If specified, this function will return nothing.
    :type filename: str or file-like object
    :param data: If specified, a POST request will be sent with the data in
        the body of the request.
    :type data: dictionary, bytes, or file-like object
    :return: The response object assuming ``filename`` is ``None``.
    :rtype: :class:`requests.Response`
    """
    _request_args = {"url": url,
                     "headers": {"User-Agent": self._user_agent},
                     "params": params}
    # Stream to file - no need to keep it in memory for large files.
    if filename:
        _request_args["stream"] = True
    if self._debug:
        # Construct the same URL requests would construct.
        from requests import PreparedRequest  # noqa
        p = PreparedRequest()
        p.prepare(method="GET", **_request_args)
        print("Downloading %s ..." % p.url)
    # Workaround for old request versions.
    try:
        if data is None:
            r = requests.get(**_request_args)
        else:
            _request_args["data"] = data
            r = requests.post(**_request_args)
    except TypeError:
        # Older requests versions reject the "stream" kwarg with a
        # TypeError; retry the same call without it.
        if "stream" in _request_args:
            del _request_args["stream"]
        if data is None:
            r = requests.get(**_request_args)
        else:
            _request_args["data"] = data
            r = requests.post(**_request_args)
    # Only accept code 200.
    if r.status_code != 200:
        self._handle_requests_http_error(r)
    # Return if nothing else happens.
    if not filename:
        return r
    _chunk_size = 1024
    # filename may be a file-like object (has .write) or a path string.
    if hasattr(filename, "write"):
        for chunk in r.iter_content(chunk_size=_chunk_size):
            if not chunk:
                continue
            filename.write(chunk)
    else:
        with io.open(filename, "wb") as fh:
            for chunk in r.iter_content(chunk_size=_chunk_size):
                if not chunk:
                    continue
                fh.write(chunk)
def add_signature(self, r: P, nonce: str):
    """Compute and attach the uppercase signature for this request."""
    # Build the canonical message for this request/nonce, sign it, and
    # store the uppercased result in the body.
    signed = self.sign(self.build_message(r, nonce))
    r.body['signature'] = signed.upper()
def request_url(url, params):
    """Return `url` with `params` encoded into its query string."""
    prepared = PreparedRequest()
    # Delegate query-string encoding to requests' URL preparation.
    prepared.prepare_url(url, params)
    return prepared.url
def add_nonce(self, r: P, nonce: str):
    """Insert the nonce into the request body and re-encode it."""
    body = self.parse_data(r.body)
    body['nonce'] = nonce
    # Re-prepare so the body encoding and Content-Length stay consistent.
    r.prepare_body(data=body, files=None)
def add_api_key(self, r: P):
    """Append the API key to the request URL's query string."""
    # Add the API key as a query parameter
    url, query = self.url_query_split(r.url)
    params = self.parse_data(query)
    params[self.api_key_param] = self.api_key
    # Re-prepare the URL so the merged params are encoded back in.
    r.prepare_url(url, params)
class TestMatchers(unittest.TestCase):
    """Exercises betamax's built-in request matchers against a canned request."""

    def setUp(self):
        """Create the PreparedRequest fixture shared by all matcher tests."""
        # Same URL plus an extra query parameter, for order-independence tests.
        self.alt_url = ('http://example.com/path/to/end/point?query=string'
                        '&foo=bar')
        self.p = PreparedRequest()
        self.p.body = 'Foo bar'
        self.p.headers = {'User-Agent': 'betamax/test'}
        self.p.url = 'http://example.com/path/to/end/point?query=string'
        self.p.method = 'GET'
        self.p._cookies = RequestsCookieJar()

    def test_matcher_registry_has_body_matcher(self):
        assert 'body' in matchers.matcher_registry

    def test_matcher_registry_has_digest_auth_matcher(self):
        assert 'digest-auth' in matchers.matcher_registry

    def test_matcher_registry_has_headers_matcher(self):
        assert 'headers' in matchers.matcher_registry

    def test_matcher_registry_has_host_matcher(self):
        assert 'host' in matchers.matcher_registry

    def test_matcher_registry_has_method_matcher(self):
        assert 'method' in matchers.matcher_registry

    def test_matcher_registry_has_path_matcher(self):
        assert 'path' in matchers.matcher_registry

    def test_matcher_registry_has_query_matcher(self):
        assert 'query' in matchers.matcher_registry

    def test_matcher_registry_has_uri_matcher(self):
        assert 'uri' in matchers.matcher_registry

    def test_body_matcher(self):
        match = matchers.matcher_registry['body'].match
        assert match(self.p, {
            'body': 'Foo bar',
            'headers': {'User-Agent': 'betamax/test'},
            'uri': 'http://example.com/path/to/end/point?query=string',
            'method': 'GET',
        })
        assert match(self.p, {
            'body': b'',
            'headers': {'User-Agent': 'betamax/test'},
            'uri': 'http://example.com/path/to/end/point?query=string',
            'method': 'GET',
        }) is False

    def test_body_matcher_without_body(self):
        p = self.p.copy()
        p.body = None
        match = matchers.matcher_registry['body'].match
        assert match(p, {
            'body': 'Foo bar',
            'headers': {'User-Agent': 'betamax/test'},
            'uri': 'http://example.com/path/to/end/point?query=string',
            'method': 'GET',
        }) is False
        assert match(p, {
            'body': b'',
            'headers': {'User-Agent': 'betamax/test'},
            'uri': 'http://example.com/path/to/end/point?query=string',
            'method': 'GET',
        })

    def test_digest_matcher(self):
        match = matchers.matcher_registry['digest-auth'].match
        assert match(self.p, {'headers': {}})
        saved_auth = (
            'Digest username="******", realm="realm", nonce="nonce", uri="/", '
            'response="r", opaque="o", qop="auth", nc=00000001, cnonce="c"'
        )
        self.p.headers['Authorization'] = saved_auth
        assert match(self.p, {'headers': {}}) is False
        assert match(self.p, {'headers': {'Authorization': saved_auth}})
        # Differs from saved_auth only in response/cnonce, which the digest
        # matcher ignores — so this still matches.
        new_auth = (
            'Digest username="******", realm="realm", nonce="nonce", uri="/", '
            'response="e", opaque="o", qop="auth", nc=00000001, cnonce="n"'
        )
        assert match(self.p, {'headers': {'Authorization': new_auth}})
        # Bug fix: this variant must differ in a *compared* field (nc) —
        # previously it was byte-identical to the one above, so asserting
        # a truthy result there and False here could never both pass.
        new_auth = (
            'Digest username="******", realm="realm", nonce="nonce", uri="/", '
            'response="e", opaque="o", qop="auth", nc=00000002, cnonce="n"'
        )
        assert match(self.p, {'headers': {'Authorization': new_auth}}) is False

    def test_headers_matcher(self):
        match = matchers.matcher_registry['headers'].match
        assert match(self.p, {'headers': {'User-Agent': 'betamax/test'}})
        assert match(self.p, {'headers': {'X-Sha': '6bbde0af'}}) is False

    def test_host_matcher(self):
        match = matchers.matcher_registry['host'].match
        assert match(self.p, {'uri': 'http://example.com'})
        assert match(self.p, {'uri': 'https://example.com'})
        assert match(self.p, {'uri': 'https://example.com/path'})
        assert match(self.p, {'uri': 'https://example2.com'}) is False

    def test_method_matcher(self):
        match = matchers.matcher_registry['method'].match
        assert match(self.p, {'method': 'GET'})
        assert match(self.p, {'method': 'POST'}) is False

    def test_path_matcher(self):
        match = matchers.matcher_registry['path'].match
        assert match(self.p, {'uri': 'http://example.com/path/to/end/point'})
        assert match(self.p,
                     {'uri': 'http://example.com:8000/path/to/end/point'})
        assert match(self.p,
                     {'uri': 'http://example.com:8000/path/to/end/'}) is False

    def test_query_matcher(self):
        match = matchers.matcher_registry['query'].match
        assert match(
            self.p,
            {'uri': 'http://example.com/path/to/end/point?query=string'}
        )
        assert match(
            self.p,
            {'uri': 'http://example.com/?query=string'}
        )
        self.p.url = self.alt_url
        assert match(
            self.p,
            {'uri': self.alt_url}
        )
        # Regression test (order independence)
        assert match(
            self.p,
            {'uri': 'http://example.com/?foo=bar&query=string'}
        )
        # Regression test (no query issue)
        assert match(self.p, {'uri': 'http://example.com'}) is False

    def test_uri_matcher(self):
        match = matchers.matcher_registry['uri'].match
        assert match(
            self.p,
            {'uri': 'http://example.com/path/to/end/point?query=string'}
        )
        assert match(self.p, {'uri': 'http://example.com'}) is False

    def test_uri_matcher_handles_query_strings(self):
        match = matchers.matcher_registry['uri'].match
        self.p.url = 'http://example.com/path/to?query=string&form=value'
        other_uri = 'http://example.com/path/to?form=value&query=string'
        assert match(self.p, {'uri': other_uri}) is True
def auth_request(self, r: P, nonce: str):
    """Parse the body into a mapping, run the parent auth steps, then re-encode."""
    # Parse once up front so parent steps can mutate r.body as a mapping.
    r.body = self.parse_data(r.body)
    super().auth_request(r, nonce)
    # Re-prepare so the mutated mapping is encoded back into a body string
    # with a consistent Content-Length.
    r.prepare_body(data=r.body, files=None)