def test_has_status_page(self, client, tmp_db, bulac_prov):
    response = client.get('/status')

    # We have an answer...
    assert response.status_code == OK

    # ...it's an HTML page
    assert response.mimetype == "text/html"

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    # Test if we received a full HTML page
    assert is_html5_page(data)

    assert has_page_title(data, "Mincer Status report")
    assert has_header_title(data, "Mincer")
    assert has_header_subtitle(data, "Status report")

    assert has_table(data)
    assert "Provider's name" in all_table_column_headers(data)
    assert "Server online?" in all_table_column_headers(data)
    assert "Server responding?" in all_table_column_headers(data)
    assert "Correctly formed answer?" in all_table_column_headers(data)

    with mincer.app.test_request_context('/status'):
        links = all_links(data)

        # Test the presence of essential links
        assert url_for(
            "provider_status",
            provider_slug="koha-search") in links
        assert url_for(
            "provider_status",
            provider_slug="koha-booklist") in links
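# Illustrative sketch only: a possible implementation of the
# all_table_column_headers() helper used above, assuming the test utilities
# parse pages with BeautifulSoup. The project's real helper may differ.
from bs4 import BeautifulSoup


def all_table_column_headers(data):
    """Return the stripped text of every <th> cell found in the page."""
    soup = BeautifulSoup(data, "html.parser")
    return [th.get_text(strip=True) for th in soup.find_all("th")]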
def test_links_are_fullpath(self, client, tmp_db, koha_booklist_prov):
    # We are using the ID of an existing list
    LIST_ID = "9896"

    url = self._build_url(LIST_ID)
    response = client.get(url)

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    links = all_links(data)
    assert len(links) > 0
    for link in links:
        assert is_absolute_url(link)
def test_links_are_fullpath(self, client, tmp_db, koha_search_prov):
    # This search returns only a few results
    SEARCH_QUERY = 'afrique voiture'

    url = self._build_url(SEARCH_QUERY)
    response = client.get(url)

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    links = all_links(data)
    assert len(links) > 0
    for link in links:
        assert is_absolute_url(link)
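# Illustrative sketch only: the is_absolute_url() helper used by the tests
# above could be as simple as checking that a URL carries both a scheme and
# a network location. This sketch assumes urllib.parse; the project's real
# helper may be implemented differently.
from urllib.parse import urlparse


def is_absolute_url(url):
    """Return True if the URL has an explicit scheme and host."""
    parsed = urlparse(url)
    return bool(parsed.scheme) and bool(parsed.netloc)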
def test_has_home_page(self, client, tmp_db, bulac_prov):
    response = client.get('/')

    # We have an answer...
    assert response.status_code == OK

    # ...it's an HTML document...
    assert response.mimetype == "text/html"

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    # Test if we received a full HTML page
    assert is_html5_page(data)

    assert has_page_title(data, "Mincer Home")
    assert has_header_title(data, "Mincer")
    assert has_header_subtitle(data, "Home")

    # Test the presence of essential links
    with mincer.app.test_request_context('/'):
        links = all_links(data)

        # Do we have providers view links?
        assert url_for(
            "provider_status",
            provider_slug="koha-search") in links
        assert url_for(
            "provider_status",
            provider_slug="koha-booklist") in links

        # TODO: add direct link to edit provider
        # # Do we have providers edit links?
        # assert url_for(
        #     "providers",
        #     provider_slug="koha-search") in links
        # assert url_for(
        #     "providers",
        #     provider_slug="koha-booklist") in links

        # Do we have providers remove links?

        # Do we have a new provider link?
        assert url_for("provider_new") in links

        # Do we have admin links?
        assert url_for("status") in links
        assert url_for("admin") in links
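# Illustrative sketch only: a possible implementation of the all_links()
# helper used throughout these tests, assuming BeautifulSoup-based parsing.
# The project's real helper may differ (it might, for instance, also collect
# <link> href attributes).
from bs4 import BeautifulSoup


def all_links(data):
    """Return the href of every <a> tag found in the page."""
    soup = BeautifulSoup(data, "html.parser")
    return [a["href"] for a in soup.find_all("a", href=True)]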
def test_returned_links_are_fullpath(self, client, tmp_db, fake_serv, fake_prov):
    # We are using a search query that returns results containing links
    QUERY = "search with links"

    URL = self._build_url_from_query(QUERY)
    response = client.get(URL)

    # We have an answer...
    assert response.status_code == OK

    # ...it's an HTML document
    assert response.mimetype == "text/html"

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    links = all_links(data)
    assert len(links) > 0
    for link in links:
        assert is_absolute_url(link)
def test_has_admin_page(self, client, tmp_db):
    response = client.get('/admin')

    # We have an answer...
    assert response.status_code == OK

    # ...it's an HTML page
    assert response.mimetype == "text/html"

    # Let's convert it for easy inspection
    data = response.get_data(as_text=True)

    # Test if we received a full HTML page
    assert is_html5_page(data)

    assert has_page_title(data, "Mincer Administration")
    assert has_header_title(data, "Mincer")
    assert has_header_subtitle(data, "Administration")

    assert has_form(data)

    # Do we have the essential info in it?
    form_groups = all_form_groups(data)

    # Do we have all the fields needed with the correct initial values?
    dependencies = {e.name: e for e in Dependency.query.all()}
    assert form_groups["JQuery minified javascript"]\
        == dependencies["jquery-js"].url
    assert form_groups["JQuery minified javascript SHA"]\
        == dependencies["jquery-js"].sha
    assert form_groups["Popper minified javascript"]\
        == dependencies["popper-js"].url
    assert form_groups["Popper minified javascript SHA"]\
        == dependencies["popper-js"].sha
    assert form_groups["Bootstrap minified javascript"]\
        == dependencies["bootstrap-js"].url
    assert form_groups["Bootstrap minified javascript SHA"]\
        == dependencies["bootstrap-js"].sha
    assert form_groups["Bootstrap minified CSS"]\
        == dependencies["bootstrap-css"].url
    assert form_groups["Bootstrap minified CSS SHA"]\
        == dependencies["bootstrap-css"].sha
    assert form_groups["Font-Awesome minified CSS"]\
        == dependencies["font-awesome-css"].url
    assert form_groups["Font-Awesome minified CSS SHA"]\
        == dependencies["font-awesome-css"].sha

    # Do we have a button to validate the form?
    assert has_form_submit_button(data)

    # Do we have the right links to all resources?
    links = all_links(data)

    # Places to get the resources
    assert "https://code.jquery.com/" in links
    assert "https://github.com/FezVrasta/popper.js#installation" in links
    assert "https://www.bootstrapcdn.com/" in links

    # Hash generator to ensure the resources are correct using SRI
    assert "https://www.srihash.org/" in links

    # Doc about SRI
    assert "https://hacks.mozilla.org/2015/09/subresource-integrity-in-firefox-43/" in links
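# Illustrative sketch only: the all_form_groups() helper used above
# presumably maps each form field's label to its current value. Assuming
# Bootstrap-style ".form-group" markup (a <label> paired with an <input>),
# a minimal version could look like this; the project's real helper and
# markup may differ.
from bs4 import BeautifulSoup


def all_form_groups(data):
    """Map each form-group label text to its input's value attribute."""
    soup = BeautifulSoup(data, "html.parser")
    groups = {}
    for group in soup.find_all(class_="form-group"):
        label = group.find("label")
        field = group.find("input")
        if label and field:
            groups[label.get_text(strip=True)] = field.get("value", "")
    return groups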