def test_custom_headers(httpserver: HTTPServer):
    headers_with_values_in_direct_order = {
        'Custom': 'Scheme key0="value0", key1="value1"'
    }
    httpserver.expect_request(
        uri='/', headers=headers_with_values_in_direct_order).respond_with_data('OK')
    response = requests.get(httpserver.url_for('/'), headers=headers_with_values_in_direct_order)
    assert response.status_code == 200
    assert response.text == 'OK'

    # By default, a different order of items in the header value dict means a different header value
    headers_with_values_in_modified_order = {
        'Custom': 'Scheme key1="value1", key0="value0"'
    }
    response = requests.get(httpserver.url_for('/'), headers=headers_with_values_in_modified_order)
    assert response.status_code == 500

    # Define a header_value_matcher that ignores the order of items in the header value dict
    def custom_header_value_matcher(actual: str, expected: str) -> bool:
        actual_scheme, _, actual_dict_str = actual.partition(' ')
        expected_scheme, _, expected_dict_str = expected.partition(' ')
        actual_dict = parse_dict_header(actual_dict_str)
        expected_dict = parse_dict_header(expected_dict_str)
        return actual_scheme == expected_scheme and actual_dict == expected_dict

    matchers = HeaderValueMatcher.DEFAULT_MATCHERS.copy()
    matchers['Custom'] = custom_header_value_matcher
    header_value_matcher = HeaderValueMatcher(matchers)

    httpserver.handlers.clear()
    httpserver.expect_request(
        uri='/',
        headers=headers_with_values_in_direct_order,
        header_value_matcher=header_value_matcher).respond_with_data('OK')
    response = requests.get(httpserver.url_for('/'), headers=headers_with_values_in_modified_order)
    assert response.status_code == 200
    assert response.text == 'OK'

def test_get_title_should_return_current_page_title(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    response_data = '<title>Test title</title>'
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data(content_type='text/html')
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data(content_type='text/html', response_data=response_data)

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            assert self.get_title() == 'Test title'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_request_any_method(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_data("OK")

    response = requests.post(httpserver.url_for("/foobar"))
    assert response.text == "OK"
    assert response.status_code == 200

    response = requests.delete(httpserver.url_for("/foobar"))
    assert response.text == "OK"
    assert response.status_code == 200

    response = requests.put(httpserver.url_for("/foobar"))
    assert response.text == "OK"
    assert response.status_code == 200

    response = requests.patch(httpserver.url_for("/foobar"))
    assert response.text == "OK"
    assert response.status_code == 200

    response = requests.get(httpserver.url_for("/foobar"))
    assert response.text == "OK"
    assert response.status_code == 200

def test_evaluate_should_evaluate_function_when_element_is_found(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    response_data = '<div id="test">Test</div>'
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data(content_type='text/html')
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data(content_type='text/html', response_data=response_data)

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            assert self.evaluate('#test', 'element => element.textContent') == 'Test'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_update_group_events(httpserver: HTTPServer):
    api_client: MeetupApiClient = MeetupApiClient()

    sandbox_group: GroupPage = api_client.get_group(
        group_urlname=meetup_groups["sandbox"]["urlname"])

    event_1: List[EventPage] = api_client.update_group_events(group=sandbox_group, max_entries=1)
    event_2: List[EventPage] = api_client.update_group_events(group=sandbox_group, max_entries=3)
    event_3: List[EventPage] = api_client.update_group_events(group=sandbox_group, max_entries=2)

    assert isinstance(event_1[0], EventPage)
    assert len(event_1) == 1
    assert isinstance(event_2[0], EventPage)
    assert len(event_2) == 3
    assert event_1[0] != event_2[0]
    assert len(event_3) == 1
    assert event_1[0] != event_3[0]

    not_exist_group: GroupPage = NotExistGroupPageFactory()
    none_group_events: List[EventPage] = api_client.update_group_events(group=not_exist_group)
    assert len(none_group_events) == 0

    # delete all events from the sandbox group
    sandbox_group_events: List[EventPage] = sandbox_group.events()
    for event in sandbox_group_events:
        event.delete()

    # test for the HttpNoXRateLimitHeader exception
    httpserver.expect_oneshot_request(
        "{}/events?status=past&page=1".format(sandbox_group.urlname)).respond_with_data("OK")
    api_client.base_url = httpserver.url_for("/")
    event_4: List[EventPage] = api_client.update_group_events(group=sandbox_group, max_entries=1)
    assert len(event_4) == 0

    # test for the HttpNoSuccess exception
    httpserver.expect_oneshot_request("/HttpNoSuccess").respond_with_data("OK")
    api_client.base_url = httpserver.url_for("/HttpNoSuccess")
    event_5: List[EventPage] = api_client.update_group_events(group=sandbox_group, max_entries=1)
    assert len(event_5) == 0

def test_ssl():
    protocol = None
    for name in ("PROTOCOL_TLS_SERVER", "PROTOCOL_TLS", "PROTOCOL_TLSv1_2"):
        if hasattr(ssl, name):
            protocol = getattr(ssl, name)
            break

    assert protocol is not None, "Unable to obtain TLS protocol"

    context = ssl.SSLContext(protocol)

    server_crt = pjoin(assets_dir, "server.crt")
    server_key = pjoin(assets_dir, "server.key")
    root_ca = pjoin(assets_dir, "rootCA.crt")
    context.load_cert_chain(server_crt, server_key)

    with HTTPServer(ssl_context=context, port=4433) as httpserver:
        httpserver.expect_request("/foobar").respond_with_json({"foo": "bar"})
        assert httpserver.is_running()
        assert requests.get(httpserver.url_for("/foobar"), verify=root_ca).json() == {'foo': 'bar'}

def test_get_replication_updates(httpserver: test_server.HTTPServer):
    expected_json = {
        "jobs": [
            {
                "database": "_replicator",
                "doc_id": "cdyno-0000001-0000003",
                "history": [
                    {"timestamp": "2017-04-29T05:01:37Z", "type": "started"},
                    {"timestamp": "2017-04-29T05:01:37Z", "type": "added"}
                ],
                "id": "8f5b1bd0be6f9166ccfd36fc8be8fc22+continuous",
                "node": "[email protected]",
                "pid": "<0.1850.0>",
                "source": "http://myserver.com/foo",
                "start_time": "2017-04-29T05:01:37Z",
                "target": "http://*****:*****@localhost:15984/cdyno-0000003/",
                "user": None
            }
        ],
        "offset": 0,
        "total_rows": 1
    }

    httpserver.expect_oneshot_request("/_scheduler/jobs", method="GET").respond_with_json(expected_json)
    response = couch.server.replication_updates()
    assert response == expected_json

    for code in [401]:
        httpserver.expect_oneshot_request("/_scheduler/jobs", method="GET").respond_with_json({}, status=code)
        response = couch.server.replication_updates()
        assert isinstance(response, couchapy.CouchError) is True

    httpserver.expect_request("/_scheduler/jobs", method="GET").respond_with_json({})
    for k in AllowedKeys.SERVER__SCHEDULER_JOBS__PARAMS:
        response = couch.server.replication_updates(params={k: ['test']})
        assert isinstance(response, couchapy.CouchError) is False

    with pytest.raises(couchapy.InvalidKeysException):
        couch.server.replication_updates(params={'nonexisting_key': ''})

def test_type_should_raise_no_such_element_error_when_element_is_not_found(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data()
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data()

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            with pytest.raises(NoSuchElementError) as exc_info:
                self.type('#nonexistent', 'Test')

            assert str(exc_info.value) == 'Unable to locate element using selector #nonexistent'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_switch_to_page_should_raise_no_such_page_error_when_page_does_not_exist(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data()
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data()

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            with pytest.raises(NoSuchPageError) as exc_info:
                self.switch_to_page(BrowserPage(1, request_url, 'Nonexistent'))

            assert str(exc_info.value) == 'No page exists with index 1'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_wait_for_selector_should_raise_wait_timeout_error_when_element_does_not_exist(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data()
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data()

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            with pytest.raises(WaitTimeoutError) as exc_info:
                self.wait_for_selector('#test', visible=True, timeout=1)

            assert str(exc_info.value) == 'Timeout 1ms exceeded waiting for selector #test'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_request_error_handling(httpserver: HTTPServer) -> None:
    request_path = '/response-error'
    request_url = httpserver.url_for(request_path)
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data()
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data(status=500)

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            assert False, f'Response success: {response}'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert response.request.url == request_url
            assert response.status == 500
            assert len(response.headers) > 0
            assert response.text == ''

    TestCrawler().start()

    httpserver.check_assertions()

def test_get_replication_docs(httpserver: HTTPServer):
    expected_json = {
        "docs": [{
            "database": "_replicator",
            "doc_id": "cdyno-0000001-0000002",
            "error_count": 0,
            "id": "e327d79214831ca4c11550b4a453c9ba+continuous",
            "info": None,
            "last_updated": "2017-04-29T05:01:37Z",
            "node": "[email protected]",
            "proxy": None,
            "source": "http://myserver.com/foo",
            "start_time": "2017-04-29T05:01:37Z",
            "state": "running",
            "target": "http://*****:*****@localhost:15984/cdyno-0000002/"
        }],
        "offset": 0,
        "total_rows": 1
    }

    httpserver.expect_oneshot_request("/_scheduler/docs", method="GET").respond_with_json(expected_json)
    response = couch.server.get_replication_docs()
    assert response == expected_json

    for code in [401]:
        httpserver.expect_oneshot_request("/_scheduler/docs", method="GET").respond_with_json({}, status=code)
        response = couch.server.get_replication_docs()
        assert isinstance(response, CouchError) is True

    httpserver.expect_request("/_scheduler/docs", method="GET").respond_with_json({})
    for k in AllowedKeys.SERVER__SCHEDULER_DOCS__PARAMS:
        response = couch.server.get_replication_docs(params={k: ['test']})
        assert isinstance(response, CouchError) is False

    with pytest.raises(InvalidKeysException):
        couch.server.get_replication_docs(params={'nonexisting_key': ''})

async def forwarder(client, example_id, example_entry_ids):
    """An HTTP server forwarding REST requests to `client`, bound to the app under test."""
    httpserver = HTTPServer()
    httpserver \
        .expect_request(re.compile(r'/about/[0-9a-f]+')) \
        .respond_with_json((await client.get(f'/about/{example_id}')).json())
    httpserver \
        .expect_request(re.compile(r'/list/[0-9a-f]+')) \
        .respond_with_json((await client.get(f'/list/{example_id}')).json())

    json_responses = [Response((await client.get(f'/json/{example_id}/{id}')).content)
                      for id in example_entry_ids]
    json_responses_iter = iter(json_responses)
    httpserver \
        .expect_request(re.compile(rf'/json/{example_id}/[0-9a-f]+')) \
        .respond_with_handler(lambda _: next(json_responses_iter))

    try:
        httpserver.start()
        yield httpserver
    finally:
        httpserver.stop()

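# Hypothetical usage sketch for the forwarder fixture above (not part of the
# original suite). It assumes the generator is registered as an async fixture
# (e.g. with pytest-asyncio), that `requests` is imported, and that an
# `example_id` fixture supplies a hex id known to the app under test.
def test_forwarder_serves_about_endpoint(forwarder: HTTPServer, example_id):
    # The forwarder pre-recorded the app's /about response, so a plain GET
    # against the mock server should replay it successfully.
    response = requests.get(forwarder.url_for(f"/about/{example_id}"))
    assert response.status_code == 200
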
def test_close_page_should_raise_value_error_when_there_is_only_one_page(httpserver: HTTPServer) -> None:
    request_path = '/page'
    request_url = httpserver.url_for(request_path)
    httpserver.expect_ordered_request(request_path, method='HEAD').respond_with_data()
    httpserver.expect_ordered_request(request_path, method='GET').respond_with_data()

    class TestCrawler(Crawler):
        def configure(self) -> CrawlerConfiguration:
            return CrawlerConfiguration([CrawlRequest(request_url)])

        def on_response_success(self, response: CrawlResponse) -> None:
            page = self.get_current_page()

            with pytest.raises(ValueError) as exc_info:
                self.close_page(page)

            assert str(exc_info.value) == 'Cannot close the last page'

        def on_response_error(self, response: CrawlResponse) -> None:
            assert False, f'Response error: {response}'

    TestCrawler().start()

    httpserver.check_assertions()

def test_not_external(httpserver: HTTPServer):
    httpserver.expect_request("/page1").respond_with_data(
        '<img src="internal.png" />',
        content_type="text/html",
    )
    httpserver.expect_request("/page2").respond_with_data(
        '<img src="internal.png" />',
        content_type="text/html",
    )
    httpserver.expect_request("/").respond_with_data(
        f"""
        <img src="internal.png" />
        <a href="{httpserver.url_for("/page1")}">page1</a>
        <a href="{httpserver.url_for("/page2")}">page2</a>
        """,
        content_type="text/html",
    )

    crawler = Crawler(httpserver.url_for("/"), verbose=False, plugins=["ExternalImagesByURL"])
    (res, ) = crawler.asyncio_crawl(save=False)
    assert res.data == []

def test_get_database_updates(httpserver: HTTPServer):
    expected_json = {
        "results": [{
            "db_name": "mailbox",
            "type": "created",
            "seq": "1-g1AAAAFReJzLYWBg4MhgTmHgzcvPy09JdcjLz8gvLskBCjMlMiTJ____PyuDOZExFyjAnmJhkWaeaIquGIf2JAUgmWQPMiGRAZcaB5CaePxqEkBq6vGqyWMBkgwNQAqobD4h"
        }, {
            "db_name": "mailbox",
            "type": "deleted",
            "seq": "2-g1AAAAFReJzLYWBg4MhgTmHgzcvPy09JdcjLz8gvLskBCjMlMiTJ____PyuDOZEpFyjAnmJhkWaeaIquGIf2JAUgmWQPMiGRAZcaB5CaePxqEkBq6vGqyWMBkgwNQAqobD4hdQsg6vYTUncAou4-IXUPIOpA7ssCAIFHa60"
        }],
        "last_seq": "2-g1AAAAFReJzLYWBg4MhgTmHgzcvPy09JdcjLz8gvLskBCjMlMiTJ____PyuDOZEpFyjAnmJhkWaeaIquGIf2JAUgmWQPMiGRAZcaB5CaePxqEkBq6vGqyWMBkgwNQAqobD4hdQsg6vYTUncAou4-IXUPIOpA7ssCAIFHa60"
    }

    httpserver.expect_oneshot_request("/_db_updates", method="POST").respond_with_json(expected_json)
    response = couch.server.get_database_updates()
    assert response == expected_json

    httpserver.expect_oneshot_request("/_db_updates", method="POST").respond_with_json({}, status=401)
    response = couch.server.get_database_updates()
    assert isinstance(response, CouchError) is True
    assert response.status_code == 401

    httpserver.expect_request("/_db_updates", method="POST").respond_with_json({})
    for k in AllowedKeys.SERVER__DB_UPDATES__PARAMS:
        response = couch.server.get_database_updates(params={k: ['test']})
        assert isinstance(response, CouchError) is False

    with pytest.raises(InvalidKeysException):
        couch.server.get_database_updates(params={'nonexisting_key': ''})

def test_link_preview(httpserver: HTTPServer):
    httpserver.expect_request("/preview1").respond_with_data(
        get_sample("twitter-card/with-image.html"),
        headers={"content-type": "text/html"},
    )
    httpserver.expect_request("/preview2").respond_with_data(
        get_sample("generic/h1-img.html"),
        headers={"content-type": "text/html"},
    )
    httpserver.expect_request("/preview-3.json").respond_with_data(
        '{}',
        headers={"content-type": "application/json"},
    )

    url = httpserver.url_for("/preview1")
    preview = link_preview(url)
    assert preview.title == "a title"
    assert preview.force_title == "a title"
    assert preview.description is None
    assert preview.image == "/img/heck.jpg"
    assert preview.absolute_image == "%s%s" % (url, preview.image)

    url = httpserver.url_for("/preview2")
    preview = link_preview(url)
    assert preview.title == "This title is from the first h1 tag."
    assert preview.description is None

    # The credentials in this URL are redacted in the source
    preview = link_preview("http://*****:*****@abc.com/the-bunny(720p)", content="OK")
    assert preview.force_title == "abc.com/the-bunny(720p)"

    preview = link_preview("https://192.168.1.1", content="OK")
    assert preview.force_title == "192.168.1.1"

    preview = link_preview("https://192.168.1.1:9696", content="OK")
    assert preview.force_title == "192.168.1.1:9696"

    preview = link_preview(httpserver.url_for('/preview-3.json'))
    assert preview.title is None
    assert preview.description is None
    assert preview.image is None
    assert preview.absolute_image is None
    assert preview.force_title == "Preview 3"

def test_unexpected_request(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_json({"foo": "bar"})
    requests.get(httpserver.url_for("/nonexists"))
    with pytest.raises(AssertionError):
        httpserver.check_assertions()

def test_no_handler_status_code(httpserver: HTTPServer):
    httpserver.no_handler_status_code = 404
    assert requests.get(httpserver.url_for("/foobar")).status_code == 404

def error_server(httpserver: HTTPServer) -> HTTPServer:
    for status_code in error_codes_list:
        httpserver.expect_request(f"/{status_code:d}").respond_with_json('', status=status_code)
    return httpserver

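# Hypothetical sketch of a test built on the error_server fixture above (not
# part of the original suite). It assumes error_codes_list is the same iterable
# of HTTP status codes the fixture registers and that `requests` is imported.
def test_error_server_replays_status_codes(error_server: HTTPServer):
    for status_code in error_codes_list:
        # Each /<code> endpoint was registered to respond with that status.
        response = requests.get(error_server.url_for(f"/{status_code:d}"))
        assert response.status_code == status_code
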
def test_request_post_case_insensitive_method(httpserver: HTTPServer):
    httpserver.expect_request("/foobar", data='{"request": "example"}', method="post").respond_with_data("example_response")
    response = requests.post(httpserver.url_for("/foobar"), json={"request": "example"})
    httpserver.check_assertions()
    assert response.text == "example_response"
    assert response.status_code == 200

def test_expected_request_response_as_string(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_response(JSON_STRING)  # type: ignore
    assert requests.get(httpserver.url_for("/foobar")).json() == {'foo': 'bar'}

def test_expected_request_response(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_response(Response(JSON_STRING))
    assert requests.get(httpserver.url_for("/foobar")).json() == {'foo': 'bar'}

def server():
    with HTTPServer() as httpserver:
        yield httpserver

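# Hypothetical round-trip sketch for the server fixture above (not part of the
# original suite), assuming the generator is registered as a pytest fixture and
# that `requests` is imported.
def test_server_fixture_roundtrip(server: HTTPServer):
    # Register a handler on the fixture-provided server, then exercise it.
    server.expect_request("/ping").respond_with_data("pong")
    assert requests.get(server.url_for("/ping")).text == "pong"
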
def test_device_twin(
    self,
    azure_user: User,
    httpserver: HTTPServer,
    httpserver_ssl_context: ssl.SSLContext,
):
    """Test device state synchronization with IoT Hub Device Twin"""
    dev = self._prepare_device(azure_user, httpserver, httpserver_ssl_context)
    self._check_if_device_status_is_set_to_value(azure_user, httpserver, dev.id, "enabled")

    # get all the device states (device twins)
    if self.azure_iot_hub_mock:
        httpserver.expect_oneshot_request(
            re.compile("^/devices"),
            method="GET",
            query_string="api-version=2021-04-12",
        ).respond_with_json(self._prepare_iot_hub_upsert_device_response())
    rsp = self.api_azure.with_auth(azure_user.token).call("GET", iot.URL_DEVICE_STATE(dev.id))
    assert rsp.status_code == 200
    states = rsp.json()
    assert len(states.keys()) == 1
    integration_id = list(states.keys())[0]
    assert "desired" in states[integration_id]
    assert "reported" in states[integration_id]

    # set the device state (device twin)
    if self.azure_iot_hub_mock:
        httpserver.expect_oneshot_request(
            re.compile("^/twins"),
            method="GET",
            query_string="api-version=2021-04-12",
        ).respond_with_json(self._prepare_iot_hub_upsert_device_response())
        httpserver.expect_oneshot_request(
            re.compile("^/twins"),
            method="PUT",
            query_string="api-version=2021-04-12",
        ).respond_with_data(status=200)
    twin = {
        "desired": {"key": "value"},
    }
    rsp = (self.api_azure.with_auth(azure_user.token)
           .with_header("Content-Type", "application/json")
           .call("PUT", iot.URL_DEVICE_STATE(dev.id) + "/" + integration_id, twin))
    assert rsp.status_code == 200
    state = rsp.json()
    assert "desired" in state
    assert "reported" in states[integration_id]
    assert state["desired"]["key"] == "value"

    # get the device state (device twin)
    if self.azure_iot_hub_mock:
        httpserver.expect_oneshot_request(
            re.compile("^/twins"),
            method="GET",
            query_string="api-version=2021-04-12",
        ).respond_with_json(self._prepare_iot_hub_upsert_device_response())
    rsp = self.api_azure.with_auth(azure_user.token).call(
        "GET", iot.URL_DEVICE_STATE(dev.id) + "/" + integration_id)
    assert rsp.status_code == 200
    state = rsp.json()
    assert "desired" in state
    assert "reported" in states[integration_id]
    assert state["desired"]["key"] == "value"

def test_expected_request_handler(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_handler(lambda request: JSON_STRING)  # type: ignore
    assert requests.get(httpserver.url_for("/foobar")).json() == {'foo': 'bar'}

def test_grabber(httpserver: HTTPServer):
    sample = get_sample("generic/h1-p-desc.html")

    class FakeResponse(Response):
        automatically_set_content_length = False

    httpserver.expect_request("/h1-p-desc").respond_with_data(
        sample, headers={"content-type": "text/html"})
    httpserver.expect_request("/lazy").respond_with_handler(
        lambda x: sleep(0.6))
    httpserver.expect_request("/huge").respond_with_response(
        FakeResponse(
            response=b"x" * 100000,
            mimetype='text/html',
        ))
    httpserver.expect_request("/badmime").respond_with_data(
        "{}", headers={"content-type": "application/json"})
    httpserver.expect_request("/nomime").respond_with_response(
        Response(mimetype=""))
    httpserver.expect_request("/large").respond_with_response(
        FakeResponse(
            mimetype="text/html",
            headers={"content-length": "100000"},
        ))

    # success
    grabber = LinkGrabber(maxsize=100)
    with pytest.raises(exceptions.MaximumContentSizeError):
        grabber.get_content(httpserver.url_for("/h1-p-desc"))

    # initial timeout
    grabber = LinkGrabber(initial_timeout=0.5)
    with pytest.raises(ReadTimeout):
        grabber.get_content(httpserver.url_for("/lazy"))

    # receive timeout
    grabber = LinkGrabber(receive_timeout=0.1, chunk_size=1)
    with pytest.raises(TimeoutError):
        grabber.get_content(httpserver.url_for("/huge"))

    # maxsize
    grabber = LinkGrabber(receive_timeout=10000, chunk_size=1024, maxsize=20)
    with pytest.raises(exceptions.MaximumContentSizeError):
        grabber.get_content(httpserver.url_for("/huge"))

    # large
    grabber = LinkGrabber(maxsize=100)
    with pytest.raises(exceptions.MaximumContentSizeError):
        grabber.get_content(httpserver.url_for("/large"))

    # nomime
    grabber = LinkGrabber()
    with pytest.raises(exceptions.InvalidContentError):
        grabber.get_content(httpserver.url_for("/nomime"))

    # badmime
    grabber = LinkGrabber()
    with pytest.raises(exceptions.InvalidContentError):
        grabber.get_content(httpserver.url_for("/badmime"))

def test_amplitude_metadata(httpserver: HTTPServer):
    httpserver.expect_request(re.compile('/(events|identify)')).respond_with_data()

    options = AmplitudeOptions(
        events_endpoint=httpserver.url_for('/events'),
        identification_endpoint=httpserver.url_for('/identify'),
        metadata=AmplitudeMetadata(os_name='ubuntu', city="York", os_version="111.0"))
    p = AmplitudePlugin('My-Key', options)
    try:
        p.load(PluginLoadOptions(environment=Environment.DEVELOPMENT, logger=Logger.NONE))
        p.identify("user-1", Properties(item1='value1', item2=2))

        metadata = {
            "amplitude": AmplitudeMetadata(platform="LinUx", os_version="123.45")
        }
        p.track(
            "user-2",
            Event('event-1', Properties(item1='value1', item2=1), metadata=metadata))

        metadata = {
            "amplitude": AmplitudeMetadata(os_name="win", os_version="987.45")
        }
        p.track(
            "user-1",
            Event('event-2', Properties(item1='value2', item2=2), metadata=metadata))

        p.flush()
        time.sleep(0.1)

        requests = _get_cleaned_requests(httpserver)
        assert requests == [
            [{
                'city': 'York',
                'os_name': 'ubuntu',
                'os_version': '111.0',
                'user_id': 'user-1',
                'user_properties': {'item1': 'value1', 'item2': 2}
            }],
            {
                'api_key': 'My-Key',
                'events': [{
                    'user_id': 'user-2',
                    'event_type': 'event-1',
                    'event_properties': {'item1': 'value1', 'item2': 1},
                    'city': 'York',
                    'platform': 'LinUx',
                    'os_name': 'ubuntu',
                    'os_version': "123.45"
                }, {
                    'user_id': 'user-1',
                    'event_type': 'event-2',
                    'event_properties': {'item1': 'value2', 'item2': 2},
                    'city': 'York',
                    'os_name': 'win',
                    'os_version': "987.45"
                }],
            },
        ]
    finally:
        p.shutdown()
        time.sleep(0.1)
        httpserver.stop()

def test_rss_basic_flow(self, httpserver: HTTPServer):
    # Set up the response for the NZB
    nzb_data = create_and_read_nzb("basic_rar5")
    httpserver.expect_request("/test_nzb.nzb").respond_with_data(nzb_data)
    nzb_url = httpserver.url_for("/test_nzb.nzb")

    # Set the response for the RSS feed, replacing the URL to the NZB
    with open(os.path.join(SAB_DATA_DIR, "rss_feed_test.xml")) as rss_file:
        rss_data = rss_file.read()
    rss_data = rss_data.replace("NZB_URL", nzb_url)
    httpserver.expect_request("/rss_feed.xml").respond_with_data(rss_data)
    rss_url = httpserver.url_for("/rss_feed.xml")

    # Test if the base page works
    self.open_page("http://%s:%s/sabnzbd/config/rss" % (SAB_HOST, SAB_PORT))

    # Uncheck the enabled-checkbox for new feeds
    self.selenium_wrapper(
        self.driver.find_element_by_xpath,
        '//form[@action="add_rss_feed"]//input[@name="enable"]').click()
    input_name = self.selenium_wrapper(
        self.driver.find_element_by_xpath,
        '//form[@action="add_rss_feed"]//input[@name="feed"]')
    input_name.clear()
    input_name.send_keys(self.rss_name)
    self.selenium_wrapper(
        self.driver.find_element_by_xpath,
        '//form[@action="add_rss_feed"]//input[@name="uri"]').send_keys(rss_url)
    self.selenium_wrapper(
        self.driver.find_element_by_xpath,
        '//form[@action="add_rss_feed"]//button').click()

    # Check if we have results
    tab_results = int(
        self.selenium_wrapper(self.driver.find_element_by_xpath,
                              '//a[@href="#rss-tab-matched"]/span').text)
    assert tab_results > 0

    # Check if it matches the number of rows
    tab_table_results = len(
        self.driver.find_elements_by_xpath('//div[@id="rss-tab-matched"]/table/tbody/tr'))
    assert tab_table_results == tab_results

    # Pause the queue so we don't download stuff
    assert get_api_result("pause") == {"status": True}

    # Download something
    download_btn = self.selenium_wrapper(
        self.driver.find_element_by_xpath,
        '//div[@id="rss-tab-matched"]/table/tbody//button')
    download_btn.click()
    self.wait_for_ajax()

    # Does the page think it's a success?
    assert "Added NZB" in download_btn.text

    # Wait 2 seconds for the fetch
    time.sleep(2)

    # Let's check the queue
    for _ in range(10):
        queue_result_slots = get_api_result("queue")["queue"]["slots"]
        # Check if the fetch-request was added to the queue
        if queue_result_slots:
            break
        time.sleep(1)
    else:
        # The loop never stopped, so we fail
        pytest.fail("Did not find the RSS job in the queue")
        return

    # Let's remove this thing
    get_api_result("queue", extra_arguments={"name": "delete", "value": "all"})
    assert len(get_api_result("queue")["queue"]["slots"]) == 0

    # Unpause
    assert get_api_result("resume") == {"status": True}

def test_expected_request_json(httpserver: HTTPServer):
    httpserver.expect_request("/foobar").respond_with_json({"foo": "bar"})
    assert requests.get(httpserver.url_for("/foobar")).json() == {'foo': 'bar'}