def __init__(self, host='', port=None):
    """Prepare the interceptor for *host*:*port*.

    Falls back to ``localhost`` and a random high port when either
    argument is missing/falsy.
    """
    if not host:
        host = 'localhost'
    if not port:
        port = random.randint(20000, 50000)
    self._host = host
    self._port = port
    self._interceptor = RequestsInterceptor(self.get_wsgi_app,
                                            host=self._host,
                                            port=self._port)
    self._url = 'http://{0}:{1}/'.format(self._host, self._port)
    self._pages = {}
def test_requests_interceptor_host():
    """Intercepted requests to an explicit host/port reach the WSGI app."""
    hostname = str(uuid4())
    port = 9999
    interceptor = RequestsInterceptor(app=app, host=hostname, port=port)
    with interceptor as url:
        response = requests.get(url)
    assert response.status_code == 200
    assert 'WSGI intercept successful!' in response.text
def test_load_and_query(self):
    """Trigger a metadata update, then exercise the status and entity APIs."""
    with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url:
        r = requests.post("{}/api/call/update".format(url))
        assert "application/samlmetadata+xml" in r.headers['Content-Type']
        # verify we managed to load something into the DB
        r = requests.get("{}/api/status".format(url))
        assert "application/json" in r.headers['content-type']
        assert "version" in r.text
        assert r.status_code == 200
        data = r.json()
        assert 'version' in data
        assert 'store' in data
        assert 'size' in data['store']
        assert int(data['store']['size']) > 0
        # load the NORDUnet IdP as xml
        r = requests.get(
            "{}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.xml"
            .format(url))
        assert r.status_code == 200
        assert 'application/samlmetadata+xml' in r.headers['Content-Type']
        # load the NORDUnet IdP as json
        r = requests.get(
            "{}/entities/%7Bsha1%7Dc50752ce1d12c2b37da13a1a396b8e3895d35dd9.json"
            .format(url))
        assert "application/json" in r.headers['Content-Type']
        assert r.status_code == 200
        data = r.json()
        assert data is not None and len(data) == 1
        info = data[0]
        # isinstance is the idiomatic type check (was: type(info) == dict)
        assert isinstance(info, dict)
        assert info['title'] == 'NORDUnet'
        assert 'nordu.net' in info['scope']
def test_requests_interceptor_url():
    """Passing a full URL (instead of host/port) also installs the intercept."""
    url = 'http://%s:%s/' % (str(uuid4()), 9999)
    with RequestsInterceptor(app=app, url=url) as target_url:
        response = requests.get(target_url)
    assert response.status_code == 200
    assert 'WSGI intercept successful!' in response.text
def test_status(self):
    """The /api/status endpoint returns JSON with version and store size."""
    with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url:
        r = requests.get("{}/api/status".format(url))
        assert "application/json" in r.headers['content-type']
        assert "version" in r.text
        assert r.status_code == 200
        data = r.json()
        for key in ('version', 'store'):
            assert key in data
        assert 'size' in data['store']
        assert int(data['store']['size']) >= 0
def test_requests_in_out():
    """The intercept works inside the context manager and is removed on exit."""
    url = 'http://%s:%s/' % (str(uuid4()), 9999)
    with RequestsInterceptor(app=app, url=url) as target_url:
        response = requests.get(target_url)
        assert response.status_code == 200
        assert 'WSGI intercept successful!' in response.text
    # outside the context manager the intercept does not work
    with py.test.raises(requests.ConnectionError):
        requests.get(url)
def test_api_resources(self):
    """After an update, /api/resources describes the loaded metadata file."""
    with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url:
        r1 = requests.post(f'{url}/api/call/update')
        assert r1.status_code == 200
        r2 = requests.get(f'{url}/api/resources')
        assert 'application/json' in r2.headers['content-type']
        assert r2.status_code == 200
        data = r2.json()
        entry = data[0]
        expected = [{
            'Resource': f'file://{self.test01}',
            'HTTP Response Headers': {'Content-Length': 3633},
            'Status Code': '200',
            'Reason': None,
            'State': 'Ready',
            'Entities': ['https://idp.example.com/saml2/idp/metadata.php'],
            'Validation Errors': {},
            # the two timestamps vary per run, so echo them into the
            # expected structure and range-check them separately below
            'Expiration Time': entry['Expiration Time'],
            'Expired': False,
            'Valid': True,
            'Parser': 'SAML',
            'Last Seen': entry['Last Seen'],
        }]
        assert data == expected
        # Now check the timestamps
        now = datetime.now(tz=timezone.utc)
        exp = datetime.fromisoformat(entry['Expiration Time'])
        assert 3590 < (exp - now).total_seconds() < 3610
        last_seen = datetime.fromisoformat(entry['Last Seen'])
        assert (last_seen - now).total_seconds() < 60
        assert os.path.exists(
            os.path.join(config.local_copy_dir,
                         urlescape(f'file://{self.test01}')))
def test_webfinger(self):
    """Webfinger answers with JSON carrying expires, links and subject."""
    with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url:
        r = requests.get("{}/.well-known/webfinger?resource={}".format(url, url))
        assert r.status_code == 200
        assert "application/json" in r.headers['content-type']
        data = r.json()
        assert data is not None
        for key in ('expires', 'links', 'subject'):
            assert key in data
        assert data['subject'] == url
        for link in data['links']:
            assert 'rel' in link
            assert 'href' in link
def test_webfinger_rel_dj(self):
    """Webfinger with rel=disco-json returns only disco-json links."""
    with RequestsInterceptor(self.app, host='127.0.0.1', port=80) as url:
        r = requests.get(
            "{}/.well-known/webfinger?resource={}&rel=disco-json".format(url, url))
        assert r.status_code == 200
        assert "application/json" in r.headers['content-type']
        data = r.json()
        assert data is not None
        assert 'expires' in data
        assert 'links' in data
        assert 'subject' in data
        assert data['subject'] == url
        for link in data['links']:
            assert 'rel' in link
            # was `link['rel'] in 'disco-json'` — a substring test that also
            # passes for '' or 'disco'; compare for equality instead
            assert link['rel'] == 'disco-json'
            assert link['rel'] != 'urn:oasis:names:tc:SAML:2.0:metadata'
            assert 'href' in link
def test_header_app():
    """Make sure the header app returns headers.

    Many libraries normalize headers to strings, so exact matches are
    not guaranteed.
    """
    header_value = 'alpha'
    header_value_str = 'alpha'

    def make_header_app():
        return app({'request-id': header_value})

    with RequestsInterceptor(make_header_app) as url:
        response = requests.get(url)
    assert response.headers['request-id'] == header_value_str
def run_assertion(self):
    """POST the form, expect a redirect that sets a single 'handle' cookie."""
    form = {
        'title': 'タイトル',
        'handle': 'あ',
        'message': 'メッセージ',
    }
    interceptor = RequestsInterceptor(self.get_app, host='localhost', port=8081)
    with interceptor as url:
        actual = requests.post(url, data=form, allow_redirects=False)
    assert actual.status_code == 302
    assert len(actual.cookies) == 1
    handle = actual.cookies['handle']
    print(handle)        # => "\343\201\202"
    print(type(handle))  # => <class 'str'>
    return handle
def test_encoding_violation():
    """If the header is unicode we expect boom.

    With ``wsgi_intercept.STRICT_RESPONSE_HEADERS`` set, response header
    keys/values must be native ``str``; other string types raise
    ``TypeError``.  With it unset, they are tolerated.
    """
    header_key = 'request-id'
    if six.PY2:
        header_value = u'alpha'
    else:
        header_value = b'alpha'
    # we expect our http library to give us a str
    returned_header = 'alpha'

    def header_app():
        return app({header_key: header_value})

    # save original so the module-level flag can always be restored
    strict_response_headers = wsgi_intercept.STRICT_RESPONSE_HEADERS
    try:
        with RequestsInterceptor(header_app) as url:
            # With STRICT_RESPONSE_HEADERS True, response headers must be
            # native str.
            wsgi_intercept.STRICT_RESPONSE_HEADERS = True
            with py.test.raises(TypeError) as error:
                requests.get(url)
            assert (str(error.value) ==
                    "Header has a key '%s' or value '%s' "
                    "which is not a native str." % (header_key, header_value))

            # When False, other types of strings are okay.
            wsgi_intercept.STRICT_RESPONSE_HEADERS = False
            response = requests.get(url)
            assert response.headers['request-id'] == returned_header
    finally:
        # bug fix: the original only restored on the success path, so a
        # failing assertion leaked the changed global into other tests
        wsgi_intercept.STRICT_RESPONSE_HEADERS = strict_response_headers
class MockWebServer(object):
    """In-process HTTP server mock backed by wsgi_intercept.

    Pages registered via :meth:`set` are served by a tiny WSGI app, and
    every request to a known page is recorded on its ``Page`` object for
    later inspection.
    """

    def __init__(self, host='', port=None):
        """Bind the interceptor to *host*:*port* (random high port by default)."""
        self._host = host or 'localhost'
        self._port = port or random.randint(20000, 50000)
        self._interceptor = RequestsInterceptor(self.get_wsgi_app,
                                                host=self._host,
                                                port=self._port)
        self._url = 'http://{0}:{1}/'.format(self._host, self._port)
        # maps request path -> Page
        self._pages = {}

    def get_wsgi_app(self):
        """App factory handed to RequestsInterceptor: return the WSGI callable."""
        return self.wsgi

    def wsgi(self, environ, start_response):
        """WSGI app: serve the registered page for the path, recording the request."""
        method = environ['REQUEST_METHOD']
        path, query = environ['PATH_INFO'], None
        if '?' in path:
            path, query = path.split('?', 1)
        page = self._pages.get(path)
        if not page:
            start_response('404 Not Found', [])
            return []
        body = environ['wsgi.input'].read()
        page.record_request(
            Request(
                method=method,
                query=query,
                headers=environ.items(),
                body=body,
            ))
        # NOTE: the original guarded this header with `if [page.content]:`,
        # which is always true — the content-type header is always sent.
        headers = [('content-type', page.content_type)]
        start_response('{} {}'.format(page.status, page.status_message),
                       headers)
        return [page.content]

    @property
    def url(self):
        """Base URL of the mocked server."""
        return self._url

    def __enter__(self):
        self._interceptor.__enter__()
        return self

    def __exit__(self, *exc):
        self._interceptor.__exit__(*exc)

    def page(self, url):
        """Return (creating on first use) the Page registered at *url*."""
        # bug fix: urllib.basejoin does not exist on Python 3 (this file
        # already uses f-strings elsewhere); use urllib.parse.urljoin
        from urllib.parse import urljoin
        if url not in self._pages:
            full_url = urljoin(self.url, url)
            self._pages[url] = Page(full_url)
        return self._pages[url]

    def set(self, url, content, content_type='text/plain'):
        """Register *content* (with *content_type*) at *url*; return the Page."""
        page = self.page(url)
        page.set_content(content, content_type)
        return page