def test_websearch_admin_interface_pages_availability(self):
    """websearchadmin - availability of WebSearch Admin interface pages"""
    base = CFG_SITE_URL + '/admin/websearch/websearchadmin.py'
    query_strings = (
        '',
        '?mtype=perform_showall',
        '?mtype=perform_addcollection',
        '?mtype=perform_addcollectiontotree',
        '?mtype=perform_modifycollectiontree',
        '?mtype=perform_checkwebcollstatus',
        '?mtype=perform_checkcollectionstatus',
    )
    problems = []
    for query in query_strings:
        page = base + query
        # a guest must be refused access:
        problems.extend(test_web_page_content(page, username='******',
                                              expected_text='Authorization failure'))
        # the admin must get through:
        problems.extend(test_web_page_content(page, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_update_titles_of_old_linkbacks(self):
    """weblinkback - test update titles of old linkbacks"""
    # Register three trackbacks (with and without explicit titles).
    self.assertNotEqual([], test_web_page_content(
        CFG_SITE_URL + '/%s/41/linkbacks/sendtrackback?url=http://www.google.au&title=Google' % CFG_SITE_RECORD,
        username='******'))
    self.assertNotEqual([], test_web_page_content(
        CFG_SITE_URL + '/%s/41/linkbacks/sendtrackback?url=http://www.google.at' % CFG_SITE_RECORD,
        username='******'))
    self.assertNotEqual([], test_web_page_content(
        CFG_SITE_URL + '/%s/41/linkbacks/sendtrackback?url=http://www.google.co.za&title=Google' % CFG_SITE_RECORD,
        username='******'))
    update_url_title("http://www.google.au", "Google AU")

    # First pass: mock page-title fetcher #1, refresh up to 2 titles.
    patcher = patch('invenio.weblinkback.get_title_of_page', get_title_of_page_mock1)
    patcher.start()
    update_linkbacks(2)
    rows = self.get_all_from_table("lnkENTRYURLTITLE")
    self.assertEqual(get_title_of_page_mock1(), rows[0][2])
    self.assertEqual("", rows[1][2])
    self.assertEqual("Google", rows[2][2])
    update_linkbacks(1)
    patcher.stop()

    # Second pass: mock fetcher #2 now fills the remaining title too.
    patcher = patch('invenio.weblinkback.get_title_of_page', get_title_of_page_mock2)
    patcher.start()
    update_linkbacks(2)
    rows = self.get_all_from_table("lnkENTRYURLTITLE")
    self.assertEqual(get_title_of_page_mock2(), rows[0][2])
    self.assertEqual(get_title_of_page_mock2(), rows[1][2])
    self.assertEqual("Google", rows[2][2])
    patcher.stop()
def test_get_approved_latest_added_linkback(self):
    """weblinkback - get approved latest added linkbacks"""
    # Approve five linkbacks, reject one.
    for offset in (1, 2, 5, 6, 7):
        approve_linkback(offset + self._max_id_lnkENTRY, self.user_info)
    reject_linkback(4 + self._max_id_lnkENTRY, self.user_info)

    # Requesting n <= 5 returns exactly n approved entries.
    for count in range(6):
        self.assertEqual(count, len(get_approved_latest_added_linkbacks(count)))

    # Requesting more than available caps at the 5 approved ones,
    # in insertion order.
    latest = get_approved_latest_added_linkbacks(6)
    self.assertEqual(5, len(latest))
    for position, offset in enumerate((1, 2, 5, 6, 7)):
        self.assertEqual(offset + self._max_id_lnkENTRY, latest[position][0])

    # The public page lists the approved URLs and hides the others.
    page = CFG_SITE_URL + '/linkbacks/'
    for label in ('URL1', 'URL2', 'URL5', 'URL6', 'URL7'):
        self.assertEqual([], test_web_page_content(page, username='******',
                                                   expected_text=label))
    for label in ('URL3', 'URL4', 'URL8'):
        self.assertEqual([], test_web_page_content(page, username='******',
                                                   unexpected_text=label))
def test_send_trackback(self):
    """weblinkback - create linkback through /sendtrackback"""
    # A well-formed trackback succeeds.
    page = CFG_SITE_URL + '/%s/42/linkbacks/sendtrackback?url=http://www.google.ch' % CFG_SITE_RECORD
    self.assertEqual([], test_web_page_content(page, username='******',
                                               expected_text='<response></response>',
                                               unexpected_text='<error>'))
    # Missing or empty "url" parameter must be rejected.
    for suffix in ('', '?url', '?url='):
        page = CFG_SITE_URL + '/%s/42/linkbacks/sendtrackback' % CFG_SITE_RECORD + suffix
        self.assertNotEqual([], test_web_page_content(page, username='******'))
    # A blacklisted URL must be rejected too.
    add_url_to_list('google', CFG_WEBLINKBACK_LIST_TYPE['BLACKLIST'], self.user_info)
    page = CFG_SITE_URL + '/%s/42/linkbacks/sendtrackback?url=http://www.google.ch' % CFG_SITE_RECORD
    self.assertNotEqual([], test_web_page_content(page, username='******'))
def test_access_public_record_public_discussion_public_comment(self):
    """webcomment - accessing "public" comment in a "public" discussion of a restricted record"""
    # A guest must not see the comment body.
    guest_errors = test_web_page_content(
        "%s/record/%i/comments/" % (CFG_SITE_URL, self.restr_record),
        expected_text=self.msg2)
    self.assertNotEqual([], guest_errors)
    # A non-existing attachment of a restricted comment asks for login...
    self.assertEqual([], test_web_page_content(
        "%s/record/%i/comments/attachments/get/%i/not_existing_file"
        % (CFG_SITE_URL, self.restr_record, self.restr_comid_1),
        expected_text="You can use your nickname or your email address to login"))
    # ...and so does an existing one.
    self.assertEqual([], test_web_page_content(
        "%s/record/%i/comments/attachments/get/%i/file2"
        % (CFG_SITE_URL, self.restr_record, self.restr_comid_1),
        expected_text="You can use your nickname or your email address to login"))
def test_precached_area_authorization(self):
    """webaccess - login-time precached authorizations for usebaskets"""
    page = CFG_SITE_SECURE_URL + '/youraccount/display?ln=en'
    # The first account should see its baskets; the second should not.
    problems = test_web_page_content(page, username='******', password='******',
                                     expected_text='Your Baskets')
    problems.extend(test_web_page_content(page, username='******', password='******',
                                          unexpected_text='Your Baskets'))
    if problems:
        self.fail(merge_error_messages(problems))
def test_bibindex_admin_interface_pages_availability(self):
    """bibindexadmin - availability of BibIndex Admin interface pages"""
    base = CFG_SITE_URL + '/admin/bibindex/bibindexadmin.py/'
    pages = (
        '',
        'index',
        'index?mtype=perform_showindexoverview',
        'index?mtype=perform_editindexes',
        'index?mtype=perform_addindex',
        'field',
        'field?mtype=perform_showfieldoverview',
        'field?mtype=perform_editfields',
        'field?mtype=perform_addfield',
    )
    problems = []
    for page in pages:
        target = base + page
        # guests must be refused:
        problems.extend(test_web_page_content(target, username='******',
                                              expected_text='Authorization failure'))
        # admin must get through:
        problems.extend(test_web_page_content(target, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_twpc_expected_link_arg(self):
    """testutils - test_web_page_content() and expected_link argument"""
    aleph_target = CFG_SITE_URL + "/collection/ALEPH?ln=" + CFG_SITE_LANG
    isolde_target = CFG_SITE_URL + "/collection/ISOLDE?ln=" + CFG_SITE_LANG
    # should find link to ALEPH:
    self.assertEqual([], test_web_page_content(CFG_SITE_URL,
                                               expected_link_target=aleph_target))
    # should find link entitled ISOLDE:
    self.assertEqual([], test_web_page_content(CFG_SITE_URL,
                                               expected_link_label="ISOLDE"))
    # should find link to ISOLDE entitled ISOLDE:
    self.assertEqual([], test_web_page_content(CFG_SITE_URL,
                                               expected_link_target=isolde_target,
                                               expected_link_label="ISOLDE"))
    # should NOT find link to ALEPH entitled ISOLDE:
    problems = test_web_page_content(CFG_SITE_URL,
                                     expected_link_target=aleph_target,
                                     expected_link_label="ISOLDE")
    if problems[0].find(" does not contain link to ") == -1:
        self.fail("Should not find link to ALEPH entitled ISOLDE.")
    return
def test_robot_login_method_merging_accounts(self):
    """webaccess - robot login method merging accounts

    Logs in via each robot method with two distinct emails/nicknames to
    create two users, then logs in with the first email but the second
    nickname and checks that the first user was merged into the second.
    """
    for method_name, method in self.robot_login_methods.iteritems():
        url = method.test_create_example_url(self.a_email, method_name,
                                             self.a_robot, self.myip,
                                             nickname=self.a_nickname)
        url2 = method.test_create_example_url(self.another_email, method_name,
                                              self.a_robot, self.myip,
                                              nickname=self.another_nickname)
        # Same email as url, but the OTHER nickname -> triggers the merge.
        url3 = method.test_create_example_url(self.a_email, method_name,
                                              self.a_robot, self.myip,
                                              nickname=self.another_nickname)
        try:
            # First robot login creates the first user.
            error_messages = test_web_page_content(url, expected_text=self.a_nickname)
            if error_messages:
                self.fail(merge_error_messages(error_messages))
            id_user = run_sql("SELECT id FROM user WHERE email=%s",
                              (self.a_email, ))[0][0]
            self.failUnless(run_sql(
                "SELECT * FROM userEXT WHERE id=%s AND id_user=%s AND method=%s",
                (self.a_nickname, id_user, method_name)))

            # Second robot login creates a distinct second user.
            error_messages = test_web_page_content(url2, expected_text=self.another_nickname)
            if error_messages:
                self.fail(merge_error_messages(error_messages))
            id_user2 = run_sql("SELECT id FROM user WHERE email=%s",
                               (self.another_email, ))[0][0]
            self.failIfEqual(id_user, id_user2)
            self.failUnless(run_sql(
                "SELECT * FROM userEXT WHERE id=%s AND id_user=%s AND method=%s",
                (self.another_nickname, id_user2, method_name)),
                "Can't find id %s for user %s with metod %s. userEXT contains: %s"
                % (self.another_nickname, id_user2, method_name,
                   run_sql("SELECT * FROM userEXT")))
            ## The first email should still exists
            self.failUnless(run_sql("SELECT * FROM user WHERE email=%s",
                                    (self.a_email, )))

            ## We log in with the 1st email but with the second nickname.
            ## That means the 1st user should be merged into the second.
            error_messages = test_web_page_content(url3, expected_text=self.another_nickname)
            if error_messages:
                self.fail(merge_error_messages(error_messages))
            ## The another_email should not exist any longer.
            # NOTE: this message string was broken across physical lines in
            # the original source; rejoined into a single valid literal.
            self.failIf(run_sql("SELECT * FROM user WHERE email=%s",
                                (self.another_email, )),
                        "%s still exists! while it should have been merged into %s: %s, userEXT contains: %s"
                        % (self.another_email, self.a_email,
                           run_sql("SELECT * FROM user WHERE email=%s",
                                   (self.another_email, )),
                           run_sql("SELECT * FROM userEXT")))
            ## And the corresponding user should not exist anymore as it has been
            ## merged into id_user
            self.failIf(run_sql("SELECT * FROM user WHERE id=%s", (id_user2, )))
            self.failUnless(run_sql("SELECT * FROM user WHERE id=%s AND email=%s",
                                    (id_user, self.a_email)))
        finally:
            self._erase_example_user_and_groups()
def test_detailed_html_output(self):
    """bibformat - Detailed HTML output"""
    # Record 74 (Article).
    expected_74 = [
        self.record_74_hd_header,
        self.record_74_hd_title,
        self.record_74_hd_authors,
        self.record_74_hd_abstract,
        self.record_74_hd_pubinfo,
        self.record_74_hd_fulltext,
        # self.record_74_hd_citations,
        # self.record_74_hd_references,
    ]
    self.assertEqual([], test_web_page_content(CFG_SITE_URL + '/record/74?of=hd',
                                               expected_text=expected_74))
    # Record 7 (Picture).
    expected_7 = [
        self.record_7_hd_header,
        self.record_7_hd_title,
        self.record_7_hd_date,
        self.record_7_hd_abstract,
        self.record_7_hd_resource,
        self.record_7_hd_resource_link,
    ]
    self.assertEqual([], test_web_page_content(CFG_SITE_URL + '/record/7?of=hd',
                                               expected_text=expected_7))
def test_article_in_unreleased_issue(self):
    """webjournal - check access to unreleased article"""
    article_url = CFG_SITE_URL + "/journal/AtlantisTimes/2009/06/News/112"
    # The record itself is not public.
    self.assertEqual(record_public_p(112), False)
    # A guest sees only the preview text, not the article body.
    problems = test_web_page_content(
        article_url,
        expected_text=["A naturalist's voyage around the world"],
        unexpected_text=["Galapagos Archipelago"])
    if problems:
        self.fail(merge_error_messages(problems))
    # The journal editor sees the full unreleased article.
    problems = test_web_page_content(
        article_url,
        username="******",
        password="******",
        expected_text=["Galapagos Archipelago"],
        unexpected_text=["This file is restricted", "You are not authorized"])
    if problems:
        self.fail(merge_error_messages(problems))
def test_access_public_record_restricted_discussion_public_comment(self):
    """webcomment - accessing "public" comment in a restricted discussion of a public record"""
    comments_url = "%s/record/%i/comments/" % (CFG_SITE_URL, self.restricted_discussion)
    # A guest must not see the comment body.
    self.assertNotEqual([], test_web_page_content(comments_url,
                                                  expected_text=self.msg2))
    # Attachment URLs (existing or not) of a restricted comment ask to login.
    self.assertEqual([], test_web_page_content(
        "%s/record/%i/comments/attachments/get/%i/not_existing_file"
        % (CFG_SITE_URL, self.restricted_discussion, self.restr_comid_5),
        expected_text='You can use your nickname or your email address to login'))
    self.assertEqual([], test_web_page_content(
        "%s/record/%i/comments/attachments/get/%i/file2"
        % (CFG_SITE_URL, self.restricted_discussion, self.restr_comid_5),
        expected_text='You can use your nickname or your email address to login'))

    # Juliet must be denied: neither the comment...
    browser = Browser()
    browser.open(CFG_SITE_URL + '/youraccount/login')
    browser.select_form(nr=0)
    browser['p_un'] = 'juliet'
    browser['p_pw'] = 'j123uliet'
    browser.submit()
    browser.open(comments_url)
    body = browser.response().read()
    if self.msg6 in body:
        self.fail("Oops, this user should not have access to this comment")
    # ...nor its attachment.
    browser.open("%s/record/%i/comments/attachments/get/%i/file2"
                 % (CFG_SITE_URL, self.restricted_discussion, self.restr_comid_5))
    body = browser.response().read()
    if "You are not authorized" not in body:
        self.fail("Oops, this user should not have access to this comment attachment")

    # Romeo must be allowed: both the comment...
    browser = Browser()
    browser.open(CFG_SITE_URL + '/youraccount/login')
    browser.select_form(nr=0)
    browser['p_un'] = 'romeo'
    browser['p_pw'] = 'r123omeo'
    browser.submit()
    browser.open(comments_url)
    body = browser.response().read()
    if not self.msg6 in body:
        self.fail("Oops, this user should have access to this comment")
    # ...and its attachment.
    browser.open("%s/record/%i/comments/attachments/get/%i/file2"
                 % (CFG_SITE_URL, self.restricted_discussion, self.restr_comid_5))
    body = browser.response().read()
    self.assertEqual(self.attached_file2_content, body)
def test_i18n_ranking_method_names(self):
    """bibrank - I18N ranking method names"""
    page = CFG_SITE_URL + '/collection/Articles%20%26%20Preprints?as=1'
    # Both ranking methods must be listed by their translated names.
    for label in ("times cited", "journal impact factor"):
        self.assertEqual([], test_web_page_content(page, expected_text=label))
def test_get_similar_ranked(self):
    """solrutils - web similar results"""
    # Full result set, default rg.
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/search?of=id&p=recid%3A30&rm=wrd',
        expected_text="[1, 3, 4, 8, 9, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 31, 34, 43, 44, 49, 50, 56, 58, 61, 64, 66, 67, 69, 71, 73, 75, 76, 77, 78, 82, 85, 86, 87, 89, 90, 95, 96, 98, 104, 107, 109, 113, 65, 62, 60, 47, 46, 100, 99, 102, 91, 80, 7, 5, 92, 88, 74, 57, 55, 108, 84, 81, 79, 54, 101, 11, 103, 94, 48, 83, 72, 63, 2, 68, 51, 53, 97, 93, 70, 45, 52, 14, 59, 6, 10, 32, 33, 29, 30]"))
    # Same query with rg=100 yields a differently ordered set.
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/search?of=id&p=recid%3A30&rg=100&rm=wrd',
        expected_text="[3, 4, 8, 9, 13, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 31, 34, 43, 49, 56, 66, 67, 69, 71, 73, 75, 76, 87, 90, 98, 104, 107, 109, 113, 12, 95, 85, 82, 44, 1, 89, 64, 58, 15, 96, 61, 50, 86, 78, 77, 65, 62, 60, 47, 46, 100, 99, 102, 91, 80, 7, 5, 92, 88, 74, 57, 55, 108, 84, 81, 79, 54, 101, 11, 103, 94, 48, 83, 72, 63, 2, 68, 51, 53, 97, 93, 70, 45, 52, 14, 59, 6, 10, 32, 33, 29, 30]"))
def test_availability_bibconvert_hacking_pages(self):
    """bibconvert - availability of BibConvert Hacking Guide pages"""
    pages = (
        ('/help/hacking/bibconvert-internals', "BibConvert Internals"),
        ('/help/hacking/bibconvert-api', "BibConvert API"),
    )
    for path, label in pages:
        self.assertEqual([], test_web_page_content(CFG_SITE_URL + path,
                                                   expected_text=label))
    return
def test_xss_in_submission_page(self):
    """websubmit - no XSS vulnerability in access parameter"""
    # Path-traversal attempt in "access" is rejected...
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/submit?doctype=DEMOTHE&access=/../../../etc/passwd&act=SBI&startPg=1&ln=en&ln=en',
        expected_text='Invalid parameters',
        username="******",
        password="******"))
    # ...and so is an injected script tag.
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/submit?doctype=DEMOTHE&access=%3CSCRIPT%3Ealert%28%22XSS%22%29%3B%3C%2FSCRIPT%3E&act=SBI',
        expected_text='Invalid parameters',
        username="******",
        password="******"))
def test_legacy_help_page_link(self):
    """websubmit - legacy Submit Guide page link"""
    # All legacy URL spellings must still serve the Submit Guide.
    for path in ("/help/submit",
                 "/help/submit/",
                 "/help/submit/index.en.html",
                 "/help/submit/access.en.html"):
        self.assertEqual([], test_web_page_content(CFG_SITE_URL + path,
                                                   expected_text="Submit Guide"))
def test_create_linkback2(self):
    """weblinkback - create linkback with URL title"""
    recid = 42
    argd = {'url': 'URL',
            'title': 'My title',
            'excerpt': CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME,
            'blog_name': 'Test Blog',
            'id': CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME,
            'source': CFG_WEBLINKBACK_SUBSCRIPTION_DEFAULT_ARGUMENT_NAME,
            }
    linkbackid = create_trackback(recid, argd['url'], argd['title'],
                                  argd['excerpt'], argd['blog_name'],
                                  argd['id'], argd['source'], self.user_info)
    self.assertEqual(10 + self._max_id_lnkENTRY, linkbackid)

    # Freshly created linkback is pending.
    entry = get_all_linkbacks(recid=recid)[5]
    self.assertEqual(linkbackid, entry[0])
    self.assertEqual(argd['url'], entry[1])
    self.assertEqual(recid, entry[2])
    self.assertEqual(CFG_WEBLINKBACK_TYPE['TRACKBACK'], entry[4])
    self.assertEqual(CFG_WEBLINKBACK_STATUS['PENDING'], entry[5])

    page = CFG_SITE_URL + '/%s/42/linkbacks/' % CFG_SITE_RECORD
    for label in ('Linkbacks to review: 6', 'Linkbacks: 0'):
        self.assertEqual([], test_web_page_content(page, username='******',
                                                   expected_text=label))

    # After approval the status flips and the title is stored manually.
    approve_linkback(linkbackid, self.user_info)
    entry = get_all_linkbacks(recid=recid)[5]
    self.assertEqual(linkbackid, entry[0])
    self.assertEqual(argd['url'], entry[1])
    self.assertEqual(recid, entry[2])
    self.assertEqual(CFG_WEBLINKBACK_TYPE['TRACKBACK'], entry[4])
    self.assertEqual(CFG_WEBLINKBACK_STATUS['APPROVED'], entry[5])

    titles = self.get_all_from_table("lnkENTRYURLTITLE")
    self.assertEqual(1, len(titles))
    self.assertEqual(argd['url'], titles[0][1])
    self.assertEqual(argd['title'], titles[0][2])
    self.assertEqual(1, titles[0][3])
    self.assertEqual(argd['title'], get_url_title(argd['url']))
    self.assertEqual(0, len(get_urls_and_titles(CFG_WEBLINKBACK_PAGE_TITLE_STATUS['OLD'])))
    self.assertEqual(0, len(get_urls_and_titles(CFG_WEBLINKBACK_PAGE_TITLE_STATUS['NEW'])))
    self.assertEqual(1, len(get_urls_and_titles(CFG_WEBLINKBACK_PAGE_TITLE_STATUS['MANUALLY_SET'])))

    page = CFG_SITE_URL + '/%s/42/linkbacks/' % CFG_SITE_RECORD
    for label in ('Linkbacks to review: 5', 'Linkbacks: 1'):
        self.assertEqual([], test_web_page_content(page, username='******',
                                                   expected_text=label))
def test_twpc_expected_text_arg(self):
    """testutils - test_web_page_content() and expected_text argument"""
    # should find HTML in an HTML page:
    self.assertEqual([], test_web_page_content(CFG_SITE_URL + "/search?p=ellis",
                                               expected_text="</html>"))
    # should not find HTML tag in an XML page:
    problems = test_web_page_content(CFG_SITE_URL + "/search?p=ellis&of=xm")
    if problems[0].find(" does not contain </html>") == -1:
        self.fail("Should not find </html> in an XML page.")
    return
def test_twpc_username_arg(self):
    """testutils - test_web_page_content() and username arguments"""
    # should login as admin without password:
    self.assertEqual([], test_web_page_content(CFG_SITE_URL, username="******",
                                               expected_text="</html>"))
    # should not login as admin with password:
    problems = test_web_page_content(CFG_SITE_URL, username="******",
                                     password="******",
                                     expected_text="</html>")
    if problems[0].find("ERROR: Cannot login as admin.") == -1:
        self.fail("Should not be able to login as admin with foo password.")
    return
def test_must_login(self):
    """Checks users are asked to login before deposit."""
    deposit_url = CFG_SITE_SECURE_URL + '/deposit'
    # A blank password must land on the sign-in page...
    self.assertEqual([], test_web_page_content(deposit_url,
                                               username="******",
                                               password="",
                                               expected_text="Please Sign In"))
    # ...and equally must NOT reach the deposit page.
    self.assertEqual([], test_web_page_content(deposit_url,
                                               username="******",
                                               password="",
                                               unexpected_text="Please Sign In"))
def test_availability_bibclassify_hacking_pages(self):
    """bibclassify - availability of BibClassify Hacking Guide pages"""
    # Docstring normalized to the file's "module - description" convention
    # (matches the sibling copy of this test elsewhere in the suite).
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/help/hacking/bibclassify-internals',
        expected_text="BibClassify Internals"))
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/help/hacking/bibclassify-hep-taxonomy',
        expected_text="The HEP taxonomy: rationale and extensions"))
    self.assertEqual([], test_web_page_content(
        CFG_SITE_URL + '/help/hacking/bibclassify-extraction-algorithm',
        expected_text="The code behind BibClassify: the extraction algorithm"))
    return
def test_nlm_output(self):
    """bibformat - NLM output"""
    page = CFG_SITE_URL + '/record/70?of=xn'
    outcome = test_web_page_content(page, expected_text=self.record_70_xn)
    try:
        self.assertEqual([], outcome)
    except AssertionError:
        # Some serializers expand empty elements; accept both spellings.
        alternative = self.record_70_xn \
            .replace('<fpage/>', '<fpage></fpage>') \
            .replace('<lpage/>', '<lpage></lpage>')
        outcome = test_web_page_content(page, expected_text=alternative)
        self.assertEqual([], outcome)
def test_detailed_html_no_error_message(self):
    """bibformat - Detailed HTML output without error message"""
    # No error message should ever leak into the web interface.
    for record_path in ('/record/74?of=hd', '/record/7?of=hd'):
        outcome = test_web_page_content(CFG_SITE_URL + record_path,
                                        username='******',
                                        expected_text=["Exception", "Could not"])
        self.assertNotEqual([], outcome)
def test_your_baskets_pages_availability(self):
    """weblinkback - availability of /linkbacks pages"""
    # NOTE(review): method name says "your_baskets" but it exercises
    # linkback pages — looks like a copy-paste name; kept for test
    # discovery compatibility.
    problems = []
    record_base = CFG_SITE_URL + '/%s/10/linkbacks/' % CFG_SITE_RECORD
    for page in ('', 'display', 'index', 'approve', 'reject'):
        problems.extend(test_web_page_content(record_base + page))
    problems.extend(test_web_page_content(CFG_SITE_URL + '/linkbacks/'))
    if problems:
        self.fail(merge_error_messages(problems))
def test_web_search_restricted_collection_as_guest(self):
    """websearch - search for restricted collection as guest, from search page"""
    # The collection-suggestion box must not appear for guests.
    problems = test_web_page_content(
        CFG_SITE_SECURE_URL + '/search?p=Theses&ln=en',
        unexpected_text=['Looking for a particular collection?'])
    if problems:
        self.fail(merge_error_messages(problems))
def test_oai_server_pages_availability(self):
    """oairepository - availability of OAI server pages"""
    base = CFG_SITE_URL + '/oai2d'
    verbs = (  # fast commands first:
        '?verb=Identify',
        '?verb=ListMetadataFormats',
        # sleepy commands now:
        '?verb=ListSets',
        '?verb=ListRecords',
        '?verb=GetRecord',
    )
    problems = []
    for verb in verbs:
        page = base + verb
        if not (page.endswith('Identify') or page.endswith('ListMetadataFormats')):
            # some sleep required for verbs other than Identify and
            # ListMetadataFormats, since oai2d refuses too frequent access:
            time.sleep(CFG_OAI_SLEEP)
        problems.extend(test_web_page_content(page, expected_text='</OAI-PMH>'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_marc_output(self):
    """bibformat - MARC output"""
    outcome = test_web_page_content(CFG_SITE_URL + '/record/29?of=hm',
                                    expected_text=self.record_29_hm)
    self.assertEqual([], outcome)
def test_marcxml_output(self):
    """bibformat - MARCXML output"""
    page = CFG_SITE_URL + '/%s/9?of=xm' % CFG_SITE_RECORD
    # Check both the beginning and the end of the expected MARCXML.
    outcome = test_web_page_content(page, expected_text=[self.record_9_xm_beg,
                                                         self.record_9_xm_end])
    self.assertEqual([], outcome)
def test_bibtex_output(self):
    """bibformat - BibTeX output"""
    outcome = test_web_page_content(CFG_SITE_URL + '/record/74?of=hx',
                                    expected_text=self.record_74_hx)
    self.assertEqual([], outcome)
def test_webcomment_admin_interface_availability(self):
    """webcomment - availability of WebComment Admin interface pages"""
    base = CFG_SITE_URL + '/admin/webcomment/webcommentadmin.py/'
    problems = []
    for page in ('', 'comments', 'delete', 'users'):
        target = base + page
        # guests must be refused:
        problems.extend(test_web_page_content(target, username='******',
                                              expected_text='Authorization failure'))
        # admin must get through:
        problems.extend(test_web_page_content(target, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_bibedit_admin_interface_availability(self):
    """bibedit - availability of BibEdit Admin interface pages"""
    base = CFG_SITE_URL + '/admin/bibedit/bibeditadmin.py/'
    problems = []
    for page in ('', 'index', 'edit', 'submit'):
        target = base + page
        # guests must be refused:
        problems.extend(test_web_page_content(target, username='******',
                                              expected_text='Authorization failure'))
        # admin must get through:
        problems.extend(test_web_page_content(target, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_stats_pages_availability(self):
    """webstat - availability of /stats pages"""
    base = CFG_SITE_URL + '/stats/'
    pages = ('',
             'collection_population',
             'search_frequency',
             'search_type_distribution',
             'download_frequency')
    problems = []
    # Guest access is only meaningful when guests are differentiated.
    # NOTE(review): original indentation was lost; the guard is assumed to
    # cover only the guest loop — confirm against upstream.
    if CFG_WEBSESSION_DIFFERENTIATE_BETWEEN_GUESTS:
        for page in pages:
            problems.extend(test_web_page_content(base + page))
    # Admin access is always checked.
    for page in pages:
        problems.extend(test_web_page_content(base + page, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_oaiharvestadmin_interface_pages_availability(self):
    """oaiharvestadmin - availability of OAI Harvest Admin interface pages"""
    base = CFG_SITE_URL + '/admin/bibharvest/oaiharvestadmin.py/'
    problems = []
    for page in ('', 'editsource', 'addsource', 'delsource'):
        target = base + page
        # guests must be refused:
        problems.extend(test_web_page_content(target, username='******',
                                              expected_text='Authorization failure'))
        # admin must get through:
        problems.extend(test_web_page_content(target, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_search_results_ranked_by_citations_verbose(self):
    """bibrank - search results ranked by number of citations, verbose output"""
    page = (CFG_SITE_URL +
            '/search?cc=Articles+%26+Preprints&p=Klebanov&rm=citation&verbose=2')
    # The verbose trace must expose the computed citation list.
    outcome = test_web_page_content(
        page,
        username="******",
        expected_text="find_citations retlist [[85, 0], [77, 2], [84, 3]]")
    self.assertEqual([], outcome)
def test_normal_robot_login_method(self):
    """webaccess - robot login method"""
    for method_name, method in self.robot_login_methods.iteritems():
        login_url = method.test_create_example_url(self.a_email, method_name,
                                                   self.a_robot, self.myip)
        try:
            # A successful robot login shows the user's email.
            problems = test_web_page_content(login_url,
                                             expected_text=self.a_email)
            if problems:
                self.fail(merge_error_messages(problems))
        finally:
            self._erase_example_user_and_groups()
def test_admin_pages_availability(self):
    """bibcirculation - availability of main admin page"""
    page = CFG_SITE_URL + '/admin/bibcirculation/bibcirculationadmin.py'
    self.assertEqual([], test_web_page_content(
        page, expected_text="BibCirculation Admin"))
    return
def test_robot_login_method_wrong_ip(self):
    """webaccess - robot login method wrong IP"""
    for method_name, method in self.robot_login_methods.iteritems():
        # Assertion issued for an IP other than ours must be refused.
        login_url = method.test_create_example_url(self.a_email, method_name,
                                                   self.a_robot,
                                                   '123.123.123.123')
        try:
            problems = test_web_page_content(
                login_url,
                expected_text="The provided assertion has been issued for a different IP address")
            if problems:
                self.fail(merge_error_messages(problems))
        finally:
            self._erase_example_user_and_groups()
def test_xss_in_submission_act(self):
    """websubmit - no XSS vulnerability in act parameter"""
    # A script tag injected into "act" must be rejected, not echoed.
    page = (CFG_SITE_URL +
            '/submit?doctype=DEMOTHE&access=1_1&act=%3CSCRIPT%3Ealert%28%22XSS%22%29%3B%3C%2FSCRIPT%3E')
    self.assertEqual([], test_web_page_content(
        page,
        expected_text='Invalid doctype and act parameters',
        username="******",
        password="******"))
def test_brief_html_output(self):
    """bibformat - Brief HTML output"""
    page = CFG_SITE_URL + '/%s/76?of=HB' % CFG_SITE_RECORD
    expected = [self.record_76_hb_title,
                self.record_76_hb_author,
                self.record_76_hb_body]
    self.assertEqual([], test_web_page_content(page, expected_text=expected))
def test_web_search_public_submission_as_guest(self):
    """websearch - search for public submission as guest, from search page"""
    # The submission-suggestion box must appear with a working link.
    problems = test_web_page_content(
        CFG_SITE_SECURE_URL + '/search?p=submit%20article&ln=en',
        expected_text='Looking for a particular submission?',
        expected_link_label='Demo Article Submission',
        expected_link_target=CFG_SITE_SECURE_URL + '/submit?doctype=DEMOART&ln=en')
    if problems:
        self.fail(merge_error_messages(problems))
def test_xss_in_submission_doctype(self):
    """websubmit - no XSS vulnerability in doctype parameter"""
    # Injected script tag in "doctype" must be reported, not executed.
    page = (CFG_SITE_URL +
            '/submit?doctype=%3CSCRIPT%3Ealert%28%22XSS%22%29%3B%3C%2FSCRIPT%3E')
    self.assertEqual([], test_web_page_content(
        page,
        expected_text='Unable to find document type: <SCRIPT>alert("XSS")',
        username="******",
        password="******"))
def test_web_search_restricted_submission_as_guest(self):
    """websearch - search for restricted submission as guest, from search page"""
    # Neither the suggestion box nor the restricted submission may appear.
    problems = test_web_page_content(
        CFG_SITE_SECURE_URL + '/search?p=submit%20thesis&ln=en',
        unexpected_text=['Demo Thesis Submission',
                         'Looking for a particular submission?'])
    if problems:
        self.fail(merge_error_messages(problems))
def test_webaccess_admin_interface_availability(self):
    """webaccess - availability of WebAccess Admin interface pages"""
    base = CFG_SITE_URL + '/admin/webaccess/webaccessadmin.py/'
    # Fix: the original list contained '' twice (first and last entry),
    # so the base URL was tested twice redundantly.
    pages = ('',
             'delegate_startarea',
             'manageaccounts',
             'rolearea',
             'actionarea',
             'userarea',
             'managerobotlogin',
             'listgroups',
             'resetarea')
    problems = []
    for page in pages:
        target = base + page
        # guests must be refused:
        problems.extend(test_web_page_content(target, username='******',
                                              expected_text='Authorization failure'))
        # admin must get through:
        problems.extend(test_web_page_content(target, username='******'))
    if problems:
        self.fail(merge_error_messages(problems))
    return
def test_availability_bibclassify_hacking_pages(self):
    """bibclassify - availability of BibClassify Hacking Guide pages"""
    pages = (
        ('/help/hacking/bibclassify-internals',
         "BibClassify Internals"),
        ('/help/hacking/bibclassify-hep-taxonomy',
         "The HEP taxonomy: rationale and extensions"),
        ('/help/hacking/bibclassify-extraction-algorithm',
         "The code behind BibClassify: the extraction algorithm"),
    )
    for path, label in pages:
        self.assertEqual([], test_web_page_content(CFG_SITE_URL + path,
                                                   expected_text=label))
    return
def test_robot_login_method_with_groups(self):
    """webaccess - robot login method with groups"""
    for method_name, method in self.robot_login_methods.iteritems():
        login_url = method.test_create_example_url(
            self.a_email, method_name, self.a_robot, self.myip,
            groups=self.some_groups,
            referer=CFG_SITE_SECURE_URL + "/yourgroups/display")
        try:
            # Every provided group must show up, tagged with the method name.
            for group in self.some_groups:
                problems = test_web_page_content(
                    login_url, expected_text="%s [%s]" % (group, method_name))
                if problems:
                    self.fail(merge_error_messages(problems))
        finally:
            self._erase_example_user_and_groups()
def test_access_public_record_public_discussion_public_comment(self):
    """webcomment - accessing "public" comment in a "public" discussion of a restricted record"""
    # CONSISTENCY FIX: build URLs with CFG_SITE_RECORD instead of the
    # hard-coded 'record' path component, as the sibling webcomment
    # tests in this file already do.
    # Guest user should not be able to access it
    self.assertNotEqual(
        [],
        test_web_page_content(
            "%s/%s/%i/comments/" % (CFG_SITE_URL, CFG_SITE_RECORD,
                                    self.restr_record),
            expected_text=self.msg2))
    # Accessing a non existing file for a restricted comment should also ask to login
    self.assertEqual(
        [],
        test_web_page_content(
            "%s/%s/%i/comments/attachments/get/%i/not_existing_file" %
            (CFG_SITE_URL, CFG_SITE_RECORD, self.restr_record,
             self.restr_comid_1),
            expected_text='You can use your nickname or your email address to login'))
    # Check accessing file of a restricted comment
    self.assertEqual(
        [],
        test_web_page_content(
            "%s/%s/%i/comments/attachments/get/%i/file2" %
            (CFG_SITE_URL, CFG_SITE_RECORD, self.restr_record,
             self.restr_comid_1),
            expected_text='You can use your nickname or your email address to login'))
def test_robot_login_method_with_nickname(self):
    """webaccess - robot login method with nickname"""
    for method_name, method in self.robot_login_methods.iteritems():
        # Nickname forwarding is only honoured by methods that enforce
        # external nicknames; skip the rest.
        if not method.enforce_external_nicknames:
            continue
        login_url = method.test_create_example_url(
            self.a_email, method_name, self.a_robot, self.myip,
            nickname=self.a_nickname)
        try:
            problems = test_web_page_content(login_url,
                                             expected_text=self.a_nickname)
            if problems:
                self.fail(merge_error_messages(problems))
        finally:
            self._erase_example_user_and_groups()
def test_web_search_public_collection_as_guest(self):
    """websearch - search for public collection as guest, with CollectionNameSearchService"""
    # The collection-name service should suggest the matching public
    # collection, linked to its collection page.
    search_url = CFG_SITE_SECURE_URL + '/search?p=Atlantis%20Times%20Arts&ln=en'
    target = CFG_SITE_SECURE_URL + '/collection/Atlantis%20Times%20Arts?ln=en'
    problems = test_web_page_content(
        search_url,
        expected_text='Looking for a particular collection?',
        expected_link_label='Atlantis Times Arts',
        expected_link_target=target)
    if problems:
        self.fail(merge_error_messages(problems))
def test_robot_login_method_expired_assertion(self):
    """webaccess - robot login method with expired assertion"""
    for method_name, method in self.robot_login_methods.iteritems():
        # Build an assertion that expires immediately ...
        expired_url = method.test_create_example_url(
            self.a_email, method_name, self.a_robot, self.myip,
            timeout=time.time())
        # ... then wait so it is definitely stale before use.
        time.sleep(1)
        try:
            problems = test_web_page_content(
                expired_url,
                expected_text="The provided assertion is expired")
            if problems:
                self.fail(merge_error_messages(problems))
        finally:
            self._erase_example_user_and_groups()
def test_web_search_restricted_collection(self):
    """websearch - search for restricted collection, from search page"""
    # An authorized user searching for "Theses" should be offered a link
    # to the restricted Theses collection.
    search_url = CFG_SITE_SECURE_URL + '/search?p=Theses&ln=en'
    target = CFG_SITE_SECURE_URL + '/collection/Theses?ln=en'
    problems = test_web_page_content(
        search_url,
        username='******',
        password='******',
        expected_text='Looking for a particular collection?',
        expected_link_label='Theses',
        expected_link_target=target)
    if problems:
        self.fail(merge_error_messages(problems))
def test_web_search_restricted_submission(self):
    """websearch - search for restricted submission, from search page"""
    # An authorized user searching for "submit thesis" should be offered
    # a link to the restricted DEMOTHE submission.
    search_url = CFG_SITE_SECURE_URL + '/search?p=submit%20thesis&ln=en'
    target = CFG_SITE_SECURE_URL + '/submit?doctype=DEMOTHE&ln=en'
    problems = test_web_page_content(
        search_url,
        username='******',
        password='******',
        expected_text='Looking for a particular submission?',
        expected_link_label='Demo Thesis Submission',
        expected_link_target=target)
    if problems:
        self.fail(merge_error_messages(problems))
def test_web_search_restricted_submission_category(self):
    """websearch - search for restricted submission, from search page"""
    # Searching for "submit news" should suggest the News category of
    # the restricted DEMOJRN journal submission.
    search_url = CFG_SITE_SECURE_URL + '/search?p=submit%20news&ln=en'
    target = CFG_SITE_SECURE_URL + '/submit?doctype=DEMOJRN&ln=en'
    problems = test_web_page_content(
        search_url,
        username='******',
        password='******',
        expected_text='Looking for a particular submission?',
        expected_link_label='News (Demo Journal Submission)',
        expected_link_target=target)
    if problems:
        self.fail(merge_error_messages(problems))
def test_websearch_search_services_report(self):
    """websearchadmin - availability of WebSearch "Search Services" Admin pages"""
    # The services status report must load for the admin and list the
    # JournalHintService as OK.
    report_url = (CFG_SITE_URL +
                  '/admin/websearch/websearchadmin.py'
                  '?colID=1&mtype=perform_checksearchservices&ln=en')
    problems = test_web_page_content(
        report_url,
        username='******',
        password='',
        expected_text=['OK', 'JournalHintService'])
    if problems:
        self.fail(merge_error_messages(problems))
def test_access_public_record_public_discussion_public_comment(self):
    """webcomment - accessing "public" comment in a "public" discussion of a restricted record"""
    # Common URL prefix for the record's comment pages.
    comments_base = "%s/%s/%i/comments" % (CFG_SITE_URL, CFG_SITE_RECORD,
                                           self.restr_record)
    denied = 'You are not authorized to perform this action'
    # Guest user should not be able to access it
    self.assertNotEqual(
        [],
        test_web_page_content(comments_base + "/", expected_text=self.msg2))
    # Accessing a non existing file for a restricted comment should also be denied
    self.assertEqual(
        [],
        test_web_page_content(
            "%s/attachments/get/%i/not_existing_file" %
            (comments_base, self.restr_comid_1),
            expected_text=denied))
    # Check accessing file of a restricted comment
    self.assertEqual(
        [],
        test_web_page_content(
            "%s/attachments/get/%i/file2" %
            (comments_base, self.restr_comid_1),
            expected_text=denied))
def test_item_search_availability(self):
    """bibcirculation - availability of item search"""
    # Searching an item by barcode must land on its details page.
    search_url = (CFG_SITE_URL +
                  '/admin2/bibcirculation/item_search_result'
                  '?f=barcode&p=bc-34001')
    self.assertEqual([], test_web_page_content(search_url,
                                               username='******',
                                               expected_text='Item details'))
def test_get_ranked(self):
    """solrutils - web ranking results"""
    # (query string, expected id list, username or None for guest).
    # Record 77 is restricted, hence the authenticated cases below.
    cases = (
        ('of=id&p=fulltext%3AWillnotfind&rg=100&rm=wrd',
         "[]", None),
        ('of=id&p=fulltext%3Ahiggs&rm=wrd',
         "[51, 79, 55, 47, 56, 96, 58, 68, 52, 48, 89, 85]", None),
        ('of=id&p=fulltext%3Ahiggs&rg=100&rm=wrd',
         "[79, 51, 55, 47, 56, 96, 58, 68, 52, 48, 89, 85]", None),
        ('of=id&p=fulltext%3Aof&rm=wrd',
         "[8, 10, 15, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 60, 61, 62, 64, 68, 74, 78, 79, 81, 82, 83, 84, 85, 88, 89, 90, 91, 92, 95, 96, 97, 86, 11, 80, 93, 77, 12, 59, 87, 47, 94]",
         '******'),
        ('of=id&p=fulltext%3Aof&rg=100&rm=wrd',
         "[50, 61, 60, 54, 56, 53, 10, 68, 44, 57, 83, 95, 92, 91, 74, 45, 48, 62, 82, 49, 51, 89, 90, 96, 43, 8, 64, 97, 15, 85, 78, 46, 55, 79, 84, 88, 81, 52, 58, 86, 11, 80, 93, 77, 12, 59, 87, 47, 94]",
         '******'),
        ('of=id&p=fulltext%3A%22higgs+boson%22&rg=100&rm=wrd',
         "[55, 56]", None),
    )
    for query, expected, user in cases:
        url = CFG_SITE_URL + '/search?' + query
        if user is None:
            self.assertEqual([], test_web_page_content(url,
                                                       expected_text=expected))
        else:
            self.assertEqual([], test_web_page_content(url,
                                                       expected_text=expected,
                                                       username=user))
def test_basic_formatting(self):
    """bibformat - Checking BibFormat API"""
    # Render record 73 through the BibFormat API ...
    expected = format_record(recID=73,
                             of='hx',
                             ln=CFG_SITE_LANG,
                             verbose=0,
                             search_pattern=[],
                             xml_record=None,
                             user_info=None,
                             on_the_fly=True)
    # ... and verify the web page serves the same output.
    # BUGFIX: the original assigned the result of test_web_page_content()
    # to a variable and never checked it, so this test could never fail.
    pageurl = CFG_SITE_URL + '/%s/73?of=hx' % CFG_SITE_RECORD
    self.assertEqual([], test_web_page_content(pageurl,
                                               expected_text=expected))
def test_language_handling_in_record(self):
    """webjournal - check washing of ln parameter in /record handler"""
    # A bogus ln value must be washed away: the record still renders and
    # no WEBJOURNAL_* format element raises an evaluation error.
    record_url = CFG_SITE_URL + '/record/103?verbose=9&ln=hello'
    problems = test_web_page_content(
        record_url,
        expected_text=["we rode to another estate",
                       "The forest abounded with beautiful objects"],
        unexpected_text=["Error when evaluating format element WEBJOURNAL_"])
    if problems:
        self.fail(merge_error_messages(problems))
def test_restricted_article_in_released_issue(self):
    """webjournal - check access to restricted article in released issue"""
    # Record is not public
    self.assertEqual(record_public_p(112), False)
    # Once the issue is released, even a restricted article is visible
    # to guests through the journal interface.
    article_url = CFG_SITE_URL + '/journal/AtlantisTimes/2009/03/Science/111'
    problems = test_web_page_content(
        article_url,
        expected_text=["Scissor-beak"],
        unexpected_text=["A naturalist's voyage around the world"])
    if problems:
        self.fail(merge_error_messages(problems))