def test_has_permissions(self):
    """Check article permission results for a stranger publisher, the owning publisher and an admin.

    has_permissions is expected to be truthy in all non-update cases; for a stranger
    attempting an update it returns a failure report listing the unowned ISSNs.
    """
    # a journal owned by one publisher, plus an unrelated ("stranged") publisher and an admin
    journal_source = JournalFixtureFactory.make_journal_source()
    journal1 = Journal(**journal_source)

    publisher_owner_src = AccountFixtureFactory.make_publisher_source()
    publisher_owner = Account(**publisher_owner_src)
    publisher_stranged_src = AccountFixtureFactory.make_publisher_source()
    publisher_stranged = Account(**publisher_stranged_src)
    admin_src = AccountFixtureFactory.make_managing_editor_source()
    admin = Account(**admin_src)

    journal1.set_owner(publisher_owner)
    journal1.save(blocking=True)

    # an article carrying the journal's ISSNs
    eissn = journal1.bibjson().get_one_identifier("eissn")
    pissn = journal1.bibjson().get_one_identifier("pissn")
    art_source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn)
    article = Article(**art_source)

    assert self.svc.has_permissions(publisher_stranged, article, False)
    assert self.svc.has_permissions(publisher_owner, article, True)
    assert self.svc.has_permissions(admin, article, True)

    # a stranger attempting an update gets a failure report naming the unowned ISSNs
    failed_result = self.svc.has_permissions(publisher_stranged, article, True)
    expected = {'success': 0, 'fail': 1, 'update': 0, 'new': 0,
                'shared': [], 'unowned': [pissn, eissn], 'unmatched': []}
    assert failed_result == expected, "received: {}".format(failed_result)
def migrate_journals(source):
    """Migrate journal records from the XML file at *source*.

    Parses the XML, clusters the journal elements into a canonical record plus
    its historic variants, then builds and saves one Journal object per cluster,
    attaching each historic record via add_history.

    :param source: path to the XML file containing the journal records
    """
    # read in the content
    # FIX: use a context manager so the handle is closed even if parsing raises;
    # the original opened/closed the file manually.
    with open(source) as f:
        xml = etree.parse(f)
    journals = xml.getroot()
    # FIX: the original used a Python 2 print statement; the rest of the file
    # (see the print(...) call elsewhere) is Python 3.
    print("migrating", str(len(journals)), "journal records")

    clusters = _get_journal_clusters(journals)

    # make a journal object, and map the main and historic records to it
    for canon, rest in clusters:
        j = Journal()
        cb = _to_journal_bibjson(canon)
        j.set_bibjson(cb)
        j.set_in_doaj(_is_in_doaj(canon))
        j.set_created(_created_date(canon))
        for p in rest:
            replaces = _get_replaces(p)
            isreplacedby = _get_isreplacedby(p)
            j.add_history(_to_journal_bibjson(p), replaces=replaces, isreplacedby=isreplacedby)
        j.save()
class TestCreateOrUpdateArticle(DoajTestCase):
    """Exercise ArticleService.create_article across combinations of changed/unchanged
    DOI and fulltext URL, acting as both the owning publisher and an admin."""

    def setUp(self):
        super(TestCreateOrUpdateArticle, self).setUp()

        # one publisher account (owner of journal1) and one admin account
        self.publisher = Account()
        self.publisher.add_role("publisher")
        self.publisher.save(blocking=True)

        self.admin = Account()
        self.admin.add_role("admin")
        self.admin.save(blocking=True)

        sources = JournalFixtureFactory.make_many_journal_sources(2, True)

        # journal1 belongs to the publisher
        self.journal1 = Journal(**sources[0])
        self.journal1.set_owner(self.publisher.id)
        jbib1 = self.journal1.bibjson()
        jbib1.add_identifier(jbib1.P_ISSN, "1111-1111")
        jbib1.add_identifier(jbib1.E_ISSN, "2222-2222")
        self.journal1.save(blocking=True)
        self.publisher.add_journal(self.journal1)

        # journal2 has no owner set
        self.journal2 = Journal(**sources[1])
        jbib2 = self.journal2.bibjson()
        jbib2.add_identifier(jbib2.P_ISSN, "1234-5678")
        jbib2.add_identifier(jbib2.E_ISSN, "9876-5432")
        self.journal2.save(blocking=True)

        # two articles in journal1 and one in journal2, each with distinct DOI/fulltext
        self.article10 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1111-1111", eissn="2222-2222", doi="10.0000/article-10",
            fulltext="https://www.article10.com"))
        self.article10.set_id("articleid10")
        self.article10.save(blocking=True)

        self.article11 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1111-1111", eissn="2222-2222", doi="10.0000/article-11",
            fulltext="https://www.article11.com"))
        self.article11.set_id("articleid11")
        self.article11.save(blocking=True)

        self.article2 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1234-5678", eissn="9876-5432", doi="10.0000/article-2",
            fulltext="https://www.article2.com"))
        self.article2.set_id("articleid2")
        self.article2.save(blocking=True)

    def tearDown(self):
        super(TestCreateOrUpdateArticle, self).tearDown()

    def test_00_no_doi_and_url_changed(self):
        """Unchanged DOI and URL: both admin and publisher submissions update in place."""
        ba = self.article10.bibjson()
        ba.title = "Updated Article"

        # try for admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.bibjson().title == "Updated Article", "Expected `Updated Article`, received: {}" \
            .format(self.article10.bibjson().title)

        ba.title = "Updated 2nd time"

        # try for publisher
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.publisher,
                                             article=self.article10)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.bibjson().title == "Updated 2nd time", "Expected `Updated 2nd time`, received: {}" \
            .format(self.article10.bibjson().title)

    def test_01_new_doi_new_url(self):
        """Both identifiers changed: publisher creates a new article; admin updates the existing one."""
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)
        ba.remove_urls(ba.FULLTEXT)
        ba.add_identifier(ba.DOI, "10.0000/NEW")
        ba.add_url(ba.FULLTEXT, "https://www.UPDATED.com")

        # for publisher
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.publisher,
                                             article=self.article10)
        assert resp["success"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["update"] == 0, "expected 1 new, received: {}".format(resp)
        assert resp["new"] == 1, "expected 1 new, received: {}".format(resp)

        # for admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 new, received: {}".format(resp)

    def test_02_old_doi_existing_url_admin(self):
        """Same DOI but URL taken from another article: merge conflict for everyone."""
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        # check for url from other article owned by the same publisher
        ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

        # check for url from other article owned by someone else
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url(self.article2.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_03_existing_doi_old_url_admin(self):
        """Same URL but DOI taken from another article: merge conflict for everyone."""
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)
        # check for DOI from other article owned by the same publisher
        ba.add_identifier(ba.DOI, "10.0000/article-11")

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

        ba.remove_identifiers(ba.DOI)
        # check for DOI from other article owned by someone else
        ba.add_identifier(ba.DOI, "10.0000/article-2")

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_04_old_doi_new_url(self):
        """Same DOI, fresh URL: publisher hits a duplicate; admin update succeeds."""
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url("https://updated.com", ba.FULLTEXT)

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 new, received: {}".format(resp)
        assert self.article10.get_normalised_fulltext() == "//updated.com", \
            "expected //updated.com, received: {}".format(self.article10.get_normalised_fulltext())

    def test_05_new_doi_old_url(self):
        """Fresh DOI, same URL: publisher hits a duplicate; admin update succeeds."""
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, "10.0000/article-UPDATED")

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.get_normalised_doi() == "10.0000/article-UPDATED", \
            "expected 10.0000/article-UPDATED, received: {}".format(self.article10.get_normalised_fulltext())

    def test_06_existing_doi_new_url(self):
        """Fresh URL but DOI belonging to a sibling article: duplicate for everyone."""
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url("https://updated.com", ba.FULLTEXT)
        # check for doi from other article of the same publisher
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, self.article11.bibjson().get_one_identifier(ba.DOI))

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_07_new_doi_existing_url(self):
        """Fresh DOI but URL belonging to a sibling article: duplicate for everyone."""
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)
        # check for doi from other article of the same publisher
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, "10.0000/article-UPDATED")

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)
def test_01_delete_application(self, name, application_type, account_type, current_journal,
                               related_journal, raises):
    """Parameterised check of application deletion across application/account/journal states."""
    ###############################################
    ## set up

    # create the test application (if needed), and the associated current_journal
    # and related_journal in suitable states
    application = None
    cj = None
    rj = None
    if application_type == "found" or application_type == "locked":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

        if current_journal == "none":
            application.remove_current_journal()
        elif current_journal == "not_found":
            application.set_current_journal("123456789987654321")
        elif current_journal == "found":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
        elif current_journal == "locked":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
            lock.lock(constants.LOCK_JOURNAL, cj.id, "otheruser")

        if related_journal == "none":
            application.remove_related_journal()
        elif related_journal == "not_found":
            application.set_related_journal("123456789987654321")
        elif related_journal == "found":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
        elif related_journal == "locked":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
            lock.lock(constants.LOCK_JOURNAL, rj.id, "otheruser")

    acc = None
    if account_type != "none":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_type == "not_permitted":
            acc.remove_role("publisher")

    if application_type == "locked":
        thelock = lock.lock(constants.LOCK_APPLICATION, application.id, "otheruser")
        # we can't explicitly block on the lock, but we can halt until we confirm it is saved
        thelock.blockall([(thelock.id, thelock.last_updated)])

    application_id = None
    if application is not None:
        if acc is not None:
            application.set_owner(acc.id)
        application.save(blocking=True)
        application_id = application.id
    elif application_type == "not_found":
        application_id = "sdjfasofwefkwflkajdfasjd"

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.delete_application(application_id, acc)
        time.sleep(1)
        check_locks(application, cj, rj, acc)
    else:
        svc.delete_application(application_id, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check that no locks remain set for this user
        check_locks(application, cj, rj, acc)

        # check that the application actually is gone
        if application is not None:
            assert Suggestion.pull(application.id) is None

        # check that the current journal no longer has a reference to the application
        if cj is not None:
            cj = Journal.pull(cj.id)
            assert cj.current_application is None

        # check that the related journal has a record that the application was deleted
        if rj is not None:
            rj = Journal.pull(rj.id)
            record = rj.related_application_record(application.id)
            assert "status" in record
            assert record["status"] == "deleted"
if jreapp_str is not None: jreapp = datetime.strptime(jreapp_str, "%Y-%m-%dT%H:%M:%SZ") if acreated < jcreated: date_accepted = journal.created_date elif jreapp is not None and acreated < jreapp: date_accepted = jreapp_str else: date_accepted = application.created_date journal.add_related_application(application.id, date_accepted) row += [date_accepted, "yes", "application is in status 'accepted' and refers to the journal as a related journal"] rows.append(row) journal.save() else: row = [ journal.id, journal.created_date, journal.data.get("last_reapplication", ""), "", "", "", "", "no", "no application refers to this journal as a related journal" ] rows.append(row) print(counter, journal.id) with open("journal_2_application.csv", "wb") as f: writer = csv.writer(f) header = [ "journal", "journal_created", "journal_reapp", "application", "application_status", "application_created", "date_accepted",
class TestAdminEditMetadata(DoajTestCase):
    """Tests of the admin article-metadata form: access control plus valid/invalid
    updates to title, fulltext URL and DOI."""

    def setUp(self):
        super(TestAdminEditMetadata, self).setUp()
        # an admin and a publisher account to log in with
        admin_account = Account.make_account(username="******", name="Admin",
                                             email="*****@*****.**", roles=["admin"])
        admin_account.set_password('password123')
        admin_account.save()

        publisher_account = Account.make_account(username="******", name="Publisher",
                                                 email="*****@*****.**", roles=["publisher"])
        publisher_account.set_password('password456')
        publisher_account.save(blocking=True)

        # a journal and an article to edit
        self.j = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        self.j.save(blocking=True)
        self.a = Article(**ArticleFixtureFactory.make_article_source(in_doaj=True))
        self.a.save(blocking=True)

    def tearDown(self):
        super(TestAdminEditMetadata, self).tearDown()
        del self.a
        del self.j

    def admin_post_article_metadata_form(self, formdata):
        """ Post a form to the article metadata endpoint as the admin user """
        with self.app_test.test_client() as t_client:
            self.login(t_client, "admin", "password123")
            resp = t_client.post(url_for('admin.article_page', article_id=self.a.id),
                                 data=dict(formdata))
            assert resp.status_code == 200, "expected: 200, received: {}".format(resp.status)

    @staticmethod
    def login(app, username, password):
        return app.post('/account/login',
                        data=dict(username=username, password=password),
                        follow_redirects=True)

    @staticmethod
    def logout(app):
        return app.get('/account/logout', follow_redirects=True)

    def test_01_open_article_page(self):
        """ Ensure only Admin can open the article metadata form """
        with self.app_test.test_client() as t_client:
            self.login(t_client, "admin", "password123")
            resp = t_client.get(url_for('admin.article_page', article_id=self.a.id),
                                follow_redirects=False)
            assert resp.status_code == 200, "expected: 200, received: {}".format(resp.status)

        # user not logged in
        with self._make_and_push_test_context():
            with self.app_test.test_client() as t_client:
                resp = t_client.get(url_for('admin.article_page', article_id=self.a.id),
                                    follow_redirects=False)
                # expect redirection to login page
                assert resp.status_code == 302, "expected: 302, received: {}".format(resp.status)

        # login as publisher
        with self.app_test.test_client() as t_client:
            self.login(t_client, "publisher", "password456")
            resp = t_client.get(url_for('admin.article_page', article_id=self.a.id),
                                follow_redirects=False)
            # expect redirection to login page
            assert resp.status_code == 302, "expected: 302, received: {}".format(resp.status)

    def test_02_update_article_metadata_no_url_fulltext(self):
        """ Update an article with no change to identifying fields: URL and DOI """
        source = ArticleMetadataFactory(article_source=self.a).update_article_no_change_to_url_and_doi()

        # Submit the form
        self.admin_post_article_metadata_form(source)

        # Retrieve the result
        a = Article.pull(self.a.id)
        b = a.bibjson()
        assert b.title == source['title'], 'expected updated title, received: {}'.format(b.title)

    def test_03_update_fulltext_valid(self):
        """ Update an article's fulltext URL """
        source = ArticleMetadataFactory(article_source=self.a).update_article_fulltext(valid=True)

        # Submit the form
        self.admin_post_article_metadata_form(source)

        a = Article.pull(self.a.id)
        bj = a.bibjson()
        # expect updated fulltext url
        assert bj.get_single_url("fulltext") == 'https://www.newarticleurl.co.uk/fulltext', \
            'expected updated url, received: {}'.format(bj.get_single_url("fulltext"))

    def test_04_update_fulltext_invalid(self):
        """ The form should ignore an update that has the same fulltext URL as an existing article """
        source = ArticleMetadataFactory(article_source=self.a).update_article_fulltext(valid=False)

        # a second article already holding the URL we try to repeat
        a1source = ArticleFixtureFactory.make_article_source(in_doaj=True)
        a1source["id"] = 'aaaaaaaaa_article'
        a1source["fulltext"] = "https://www.urltorepeat.com"
        a1 = Article(**a1source)
        a1.save(blocking=True)

        # Submit the form
        self.admin_post_article_metadata_form(source)

        # Retrieve the result - it should be unchanged
        a = Article.pull(self.a.id)
        bj = a.bibjson()
        assert bj.title == "Article Title", 'expect old title, received: {}'.format(bj.title)
        assert bj.get_single_url("fulltext") == 'http://www.example.com/article', \
            'expected old url, received: {}'.format(bj.get_single_url("fulltext"))

    def test_05_update_doi_valid(self):
        """ The form should allow an update with a new valid DOI """
        source = ArticleMetadataFactory(article_source=self.a).update_article_doi(valid=True)

        # Submit the form
        self.admin_post_article_metadata_form(source)

        # Retrieve the result
        a = Article.pull(self.a.id)
        bj = a.bibjson()
        # expect new data
        assert bj.title == "New title", 'expect updated title, received: {}'.format(bj.title)
        assert bj.get_one_identifier("doi") == '10.1111/article-0', \
            'expected new doi, received: {}'.format(bj.get_single_identifier("doi"))

    def test_06_update_doi_invalid(self):
        """ The form should ignore an update that reuses another article's DOI """
        source = ArticleMetadataFactory(article_source=self.a).update_article_doi(valid=False)

        # a second article already holding the DOI we try to repeat
        a1source = ArticleFixtureFactory.make_article_source(in_doaj=True)
        a1source['id'] = 'aaaaaaaaa_article'
        a1source["fulltext"] = "https://www.someurl.com"
        a1source["doi"] = '10.1234/article'
        a1 = Article(**a1source)
        a1.save(blocking=True)

        # Submit the form
        self.admin_post_article_metadata_form(source)

        a = Article.pull(self.a.id)
        bj = a.bibjson()
        # expect old data
        assert bj.title == "Article Title", 'expect old title, received: {}'.format(bj.title)
        assert bj.get_one_identifier("doi") == '10.0000/SOME.IDENTIFIER', \
            'expected old doi, received: {}'.format(bj.get_one_identifier("doi"))
def test_01_accept_application(self, name, application_type, account_type, manual_update,
                               provenance, raises, result_provenance, result_manual_update):
    """Parameterised check of application acceptance: status transitions, note merging,
    manual-update stamping and provenance recording."""
    ###############################################
    ## set up

    # create the application
    application = None
    if application_type == "save_fail":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.save = mock_save
        Journal.save = mock_save
    elif application_type == "with_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.remove_notes()
        application.add_note("unique 1", "2002-01-01T00:00:00Z")
        application.add_note("duplicate", "2001-01-01T00:00:00Z")

        cj = application.current_journal
        journal = Journal(**JournalFixtureFactory.make_journal_source())
        journal.set_id(cj)
        journal.remove_notes()
        journal.add_note("unique 2", "2003-01-01T00:00:00Z")
        journal.add_note("duplicate", "2001-01-01T00:00:00Z")
        journal.save(blocking=True)
    elif application_type == "no_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.remove_current_journal()

    acc = None
    if account_type == "not_allowed":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account_type == "allowed":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    mu = None
    if manual_update in ["true", "false"]:
        mu = manual_update == "true"

    prov = None
    if provenance in ["true", "false"]:
        prov = provenance == "true"

    # NOTE(review): the save flag is chosen at random per run, so the two code paths are
    # not both exercised deterministically; the trailing `if save: pass` does nothing.
    save = bool(randint(0, 1))

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.accept_application(application, acc, mu, prov)
    else:
        journal = svc.accept_application(application, acc, mu, prov,
                                         save_journal=save, save_application=save)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check a few common things
        assert application.application_status == constants.APPLICATION_STATUS_ACCEPTED
        assert application.current_journal is None
        assert journal.current_application is None
        assert application.related_journal == journal.id

        related = journal.related_applications
        if application_type == "with_current_journal":
            assert len(related) == 3
        elif application_type == "no_current_journal":
            assert len(related) == 1
        assert related[0].get("application_id") == application.id
        assert related[0].get("date_accepted") is not None

        if result_manual_update == "yes":
            assert journal.last_manual_update is not None
            assert journal.last_manual_update != "1970-01-01T00:00:00Z"
            assert application.last_manual_update is not None
            assert application.last_manual_update != "1970-01-01T00:00:00Z"
        elif result_manual_update == "no":
            assert journal.last_manual_update is None
            assert application.last_manual_update is None

        if application_type == "with_current_journal":
            assert len(journal.notes) == 3
            notevals = [note.get("note") for note in journal.notes]
            assert "duplicate" in notevals
            assert "unique 1" in notevals
            assert "unique 2" in notevals

        app_prov = Provenance.get_latest_by_resource_id(application.id)
        if result_provenance == "yes":
            assert app_prov is not None
        elif result_provenance == "no":
            assert app_prov is None

        if save:
            pass
def test_01_delete_application(self, name, application_type, account_type, current_journal,
                               related_journal, raises):
    """Parameterised check of application deletion across application/account/journal states.

    Verifies that deleting an application removes it, clears the current journal's
    reference to it, records a "deleted" status on the related journal, and releases
    all locks held by the acting account.
    """
    ###############################################
    ## set up

    # create the test application (if needed), and the associated current_journal
    # and related_journal in suitable states
    application = None
    cj = None
    rj = None
    if application_type == "found" or application_type == "locked":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

        if current_journal == "none":
            application.remove_current_journal()
        elif current_journal == "not_found":
            application.set_current_journal("123456789987654321")
        elif current_journal == "found":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
        elif current_journal == "locked":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
            lock.lock(constants.LOCK_JOURNAL, cj.id, "otheruser")

        if related_journal == "none":
            application.remove_related_journal()
        elif related_journal == "not_found":
            application.set_related_journal("123456789987654321")
        elif related_journal == "found":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
        elif related_journal == "locked":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
            lock.lock(constants.LOCK_JOURNAL, rj.id, "otheruser")

    acc = None
    if account_type != "none":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_type == "not_permitted":
            acc.remove_role("publisher")

    if application_type == "locked":
        thelock = lock.lock(constants.LOCK_APPLICATION, application.id, "otheruser")
        # we can't explicitly block on the lock, but we can halt until we confirm it is saved
        thelock.blockall([(thelock.id, thelock.last_updated)])

    application_id = None
    if application is not None:
        if acc is not None:
            application.set_owner(acc.id)
        application.save(blocking=True)
        application_id = application.id
    elif application_type == "not_found":
        # FIX: dropped the redundant Python 2 u"..." prefix — identical value in
        # Python 3, and consistent with the sibling copy of this test.
        application_id = "sdjfasofwefkwflkajdfasjd"

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.delete_application(application_id, acc)
        time.sleep(1)
        check_locks(application, cj, rj, acc)
    else:
        svc.delete_application(application_id, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check that no locks remain set for this user
        check_locks(application, cj, rj, acc)

        # check that the application actually is gone
        if application is not None:
            assert Suggestion.pull(application.id) is None

        # check that the current journal no longer has a reference to the application
        if cj is not None:
            cj = Journal.pull(cj.id)
            assert cj.current_application is None

        # check that the related journal has a record that the application was deleted
        if rj is not None:
            rj = Journal.pull(rj.id)
            record = rj.related_application_record(application.id)
            assert "status" in record
            assert record["status"] == "deleted"
def test_01_discover_duplicates(self, name, kwargs):
    """Parameterised check of ArticleService.discover_duplicates for matches by DOI
    and by fulltext URL, with padded/prefixed/scheme-variant forms."""
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save(blocking=True)

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432",
                                                               doi=the_doi, fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save()
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234=5678"  # one matching
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn,
                                                           doi=doi, fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article, owner_id)
    else:
        possible_articles = svc.discover_duplicates(article, owner_id)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles
def test_01_create_article(self, name, kwargs):
    """Matrix-driven test of ArticleService.create_article.

    Each flag in *kwargs* is a string ("true"/"false"/"none"/"yes"/...) drawn
    from the parameter matrix; it is decoded below into the real argument
    passed to create_article, and the expected outcome flags (success,
    original_saved, merge_saved) are asserted afterwards.
    """
    # decode the raw matrix parameters
    article_arg = kwargs.get("article")
    article_duplicate_arg = kwargs.get("article_duplicate")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)

    # each tri-state flag maps "none" -> None, otherwise "true"/"false" -> bool
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    # expected exception class, or None when the call should succeed
    raises = EXCEPTIONS.get(raises_arg)

    eissn = "1234-5678"
    pissn = "9876-5432"

    # a journal with a known title must exist in the index for the
    # add_journal_info branch to copy its metadata from
    if add_journal_info:
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    article = None
    original_id = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1")
        # journal info is removed so add_journal_info has something to do
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        original_id = article.id

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)

    # replace the service's collaborators with mocks tuned to this case
    legit = True if account_arg == "owner" else False
    ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
    self.svc.is_legitimate_owner = ilo_mock

    owned = [eissn, pissn] if account_arg == "owner" else []
    shared = []
    unowned = [eissn] if account_arg == "not_owner" else []
    unmatched = [pissn] if account_arg == "not_owner" else []
    ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched)
    self.svc.issn_ownership_status = ios_mock

    gd_mock = None
    if article_duplicate_arg == "yes":
        gd_mock = BLLArticleMockFactory.get_duplicate(eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1")
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    # capture what the mock will report as the duplicate, for later assertions
    mock_article = self.svc.get_duplicate(article)

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run)
    else:
        report = self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run)
        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes":
            original = Article.pull(original_id)
            assert original is not None
            assert report["update"] == 0
        elif article is not None:
            original = Article.pull(original_id)
            assert original is None

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None:
            merged = Article.pull(mock_article.id)
            assert merged is None

        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"
from portality.models import Journal import sys # FIXME: in an ideal world, the functional tests would also be wrapped by doaj.helpers.DoajTestCase from doajtest.bootstrap import prepare_for_test prepare_for_test() journals = [ {'title':'Title 1', 'publisher': 'Publisher 1', 'identifier':[{'type':'pissn', 'id': '1234-5678'}], 'active':True}, {'title':'Title 2', 'publisher': 'Publisher 2', 'identifier':[{'type':'pissn', 'id': '0123-4567'}, {'type':'eissn', 'id': '7654-3210'}], 'license': [{'type': 'cc-by','open_access': True, 'url': 'http://opendefinition.org/licenses/cc-by/'}], 'active': True}, {'title':'Title 3', 'publisher': 'Publisher 3', 'author_pays': False, 'active': False}, {'title':'Title 4', 'publisher': 'Publisher 3', 'language': 'en', 'active': True}, {'title':'The "Quoted" Journal of Testing CSV Quotes', 'publisher': 'Publisher 2', 'identifier':[{'type':'pissn', 'id': '2345-6789'}], 'active': False, 'language': 'bg', 'author_pays':True, 'active':True, }, ] for j in journals: jm = Journal(**{'bibjson':j}) jm.save() print 'Sent {0} Journal documents to the index'.format(len(journals)) print 'Refreshing the index' Journal.refresh()
def test_02_application_2_journal(self, name, application_type, manual_update_arg, app_key_properties, current_journal, raises):
    """Matrix-driven test of ApplicationService.application_2_journal.

    Verifies that an application's bibjson, notes, contacts, editorial
    assignments, owner and seal are carried over (or retained from the
    current journal) according to the app_key_properties / current_journal
    combination, and that manual_update controls the journal's
    last_manual_update stamp.
    """
    # set up for the test
    #########################################
    cj = None
    # seal is randomised so both values get exercised across runs
    has_seal = bool(randint(0, 1))

    application = None
    if application_type == "present":
        # start from a clean application with no key properties set
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.set_id(application.makeid())
        application.remove_contacts()
        application.remove_editor_group()
        application.remove_editor()
        application.remove_owner()
        application.remove_current_journal()
        application.remove_notes()

        if app_key_properties == "yes":
            application.add_contact("Application", "*****@*****.**")
            application.set_editor_group("appeditorgroup")
            application.set_editor("appeditor")
            application.set_owner("appowner")

        application.set_seal(has_seal)
        application.add_note("Application Note")

        if current_journal == "present":
            # journal carries its own key properties, distinct from the application's
            journal = Journal(**JournalFixtureFactory.make_journal_source())
            journal.remove_contacts()
            journal.add_contact("Journal", "*****@*****.**")
            journal.set_editor_group("journaleditorgroup")
            journal.set_editor("journaleditor")
            journal.set_owner("journalowner")
            journal.remove_current_application()
            journal.remove_notes()
            journal.add_note("Journal Note")
            journal.save(blocking=True)
            application.set_current_journal(journal.id)
            cj = journal
        elif current_journal == "missing":
            # points at a journal id that does not exist in the index
            application.set_current_journal("123456789987654321")

    manual_update = None
    if manual_update_arg == "true":
        manual_update = True
    elif manual_update_arg == "false":
        manual_update = False

    # execute the test
    ########################################
    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.application_2_journal(application, manual_update)
    else:
        journal = svc.application_2_journal(application, manual_update)

        # check the result
        ######################################
        assert journal is not None
        assert isinstance(journal, Journal)
        assert journal.is_in_doaj() is True

        # journal bibjson equals the application's, modulo the 'active' flag
        jbj = journal.bibjson().data
        del jbj["active"]
        assert jbj == application.bibjson().data

        if current_journal == "present":
            assert len(journal.related_applications) == 3
        else:
            assert len(journal.related_applications) == 1
        related = journal.related_application_record(application.id)
        assert related is not None

        if manual_update_arg == "true":
            assert journal.last_manual_update is not None and journal.last_manual_update != "1970-01-01T00:00:00Z"

        if app_key_properties == "yes":
            # application-supplied key properties win
            contacts = journal.contacts()
            assert len(contacts) == 1
            assert contacts[0].get("name") == "Application"
            assert contacts[0].get("email") == "*****@*****.**"
            assert journal.editor_group == "appeditorgroup"
            assert journal.editor == "appeditor"
            assert journal.owner == "appowner"
            assert journal.has_seal() == has_seal
            if current_journal == "present":
                assert len(journal.notes) == 2
            else:
                assert len(journal.notes) == 1
        elif app_key_properties == "no":
            if current_journal == "present":
                # journal's existing key properties are retained
                contacts = journal.contacts()
                assert len(contacts) == 1
                assert contacts[0].get("name") == "Journal"
                assert contacts[0].get("email") == "*****@*****.**"
                assert journal.editor_group == "journaleditorgroup"
                assert journal.editor == "journaleditor"
                assert journal.owner == "journalowner"
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 2
            elif current_journal == "none" or current_journal == "missing":
                # nothing to inherit from either side
                contacts = journal.contacts()
                assert len(contacts) == 0
                assert journal.editor_group is None
                assert journal.editor is None
                assert journal.owner is None
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 1

        if current_journal == "present":
            # the existing journal record is updated in place, not replaced
            assert cj.id == journal.id
            assert cj.created_date == journal.created_date
def test_01_reject_application(self, name, application, application_status, account, prov, current_journal, note, save, raises=None):
    """Matrix-driven test of ApplicationService.reject_application.

    Checks the rejected status, detachment from the current journal,
    provenance recording, note attachment and the manual-update timestamp.
    """
    #######################################
    ## set up
    if save == "fail":
        # NOTE(review): patches the class attribute and never restores it —
        # this leaks into subsequent tests; confirm teardown resets Suggestion.save
        Suggestion.save = mock_save_fail

    ap = None
    journal = None
    if application == "exists":
        ap = Suggestion(**ApplicationFixtureFactory.make_application_source())
        ap.set_application_status(application_status)
        ap.set_id(ap.makeid())
        ap.remove_notes()

        if current_journal == "yes":
            # link application and journal both ways
            journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
            journal.set_id(journal.makeid())
            journal.set_current_application(ap.id)
            journal.save(blocking=True)
            ap.set_current_journal(journal.id)
        else:
            ap.remove_current_journal()

    acc = None
    if account == "publisher":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account == "admin":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    provenance = None
    if prov != "none":
        provenance = prov == "true"

    thenote = None
    if note == "yes":
        thenote = "abcdefg"

    ########################################
    ## execute
    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.reject_application(ap, acc, provenance, note=thenote)
    else:
        svc.reject_application(ap, acc, provenance, note=thenote)
        # allow the index to catch up before querying it back
        time.sleep(1)

        #######################################
        ## Check
        ap2 = Suggestion.pull(ap.id)
        assert ap2 is not None
        assert ap2.application_status == constants.APPLICATION_STATUS_REJECTED
        assert ap2.current_journal is None

        # check the updated and manually updated date are essentially the same (they can theoretically differ
        # by a small amount just based on when they are set)
        updated_spread = abs((ap2.last_updated_timestamp - ap2.last_manual_update_timestamp).total_seconds())
        assert updated_spread <= 1.0

        if current_journal == "yes" and journal is not None:
            # rejection unlinks the journal and records the relationship instead
            j2 = Journal.pull(journal.id)
            assert j2 is not None
            assert j2.current_application is None
            assert ap2.related_journal == j2.id

        if prov == "true":
            pr = Provenance.get_latest_by_resource_id(ap.id)
            assert pr is not None

        if note == "yes":
            assert len(ap2.notes) == 1
            assert ap2.notes[0].get("note") == "abcdefg"
        elif note == "no":
            assert len(ap2.notes) == 0
def test_01_update_request(self, name, journal_id, journal_lock, account, account_role, account_is_owner,
                           current_applications, application_lock, application_status, completed_applications,
                           raises, return_app, return_jlock, return_alock, db_jlock, db_alock, db_app):
    """Matrix-driven test of ApplicationService.update_request_for_journal.

    Builds a journal plus its current/completed applications and optional
    pre-existing locks, then checks the returned (application, journal lock,
    application lock) triple and the lock/application state in the index.
    """
    ###############################################
    ## set up

    # create the journal
    journal = None
    jid = None
    if journal_id == "valid":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "not_in_doaj":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=False))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "missing":
        # an id that is not present in the index
        jid = uuid.uuid4().hex

    acc = None
    if account == "yes":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_role == "none":
            acc.remove_role("publisher")
        elif account_role == "admin":
            acc.remove_role("publisher")
            acc.add_role("admin")
        acc.set_id(acc.makeid())
        if account_is_owner == "yes":
            # give the account the journal owner's id so ownership checks pass
            acc.set_id(journal.owner)

    if journal_lock == "yes":
        # simulate another user already holding the journal lock
        lock.lock("journal", jid, "someoneelse", blocking=True)

    latest_app = None
    current_app_count = int(current_applications)
    for i in range(current_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("198" + str(i) + "-01-01T00:00:00Z")
        app.set_current_journal(jid)
        app.save()
        latest_app = app
        if journal is not None:
            journal.set_current_application(app.id)

    comp_app_count = int(completed_applications)
    for i in range(comp_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("197" + str(i) + "-01-01T00:00:00Z")
        app.set_related_journal(jid)
        app.save()
        if journal is not None:
            journal.add_related_application(app.id, date_accepted=app.created_date)

    if current_app_count == 0 and comp_app_count == 0:
        # save at least one record to initialise the index mapping, otherwise tests fail
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.save()

    if application_lock == "yes":
        # simulate another user already holding the application lock
        lock.lock("suggestion", latest_app.id, "someoneelse", blocking=True)

    if application_status != "n/a":
        latest_app.set_application_status(application_status)
        latest_app.save(blocking=True)

    # finally save the journal record, ensuring we get a blocking save, so everything
    # above here should be synchronised with the repo
    if journal is not None:
        journal.save(blocking=True)

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.update_request_for_journal(jid, acc)
    else:
        application, jlock, alock = svc.update_request_for_journal(jid, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        if return_app == "none":
            assert application is None
        elif return_app == "yes":
            assert application is not None

        if return_jlock == "none":
            assert jlock is None
        elif return_jlock == "yes":
            assert jlock is not None

        if return_alock == "none":
            assert alock is None
        elif return_alock == "yes":
            assert alock is not None

        if db_jlock == "no" and acc is not None:
            assert not lock.has_lock("journal", jid, acc.id)
        elif db_jlock == "yes" and acc is not None:
            assert lock.has_lock("journal", jid, acc.id)

        if db_alock == "no" and application.id is not None and acc is not None:
            assert not lock.has_lock("suggestion", application.id, acc.id)
        elif db_alock == "yes" and application.id is not None and acc is not None:
            assert lock.has_lock("suggestion", application.id, acc.id)

        if db_app == "no" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is None
        elif db_app == "yes" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is not None

        if current_app_count == 0 and comp_app_count == 0 and application is not None:
            # a freshly minted update request carries no article stats
            assert application.article_metadata is None
            assert application.articles_last_year is None
        elif application is not None:
            assert application.article_metadata is not None
            assert application.articles_last_year is not None
def test_01_create_article(self, name, kwargs):
    """Matrix-driven test of ArticleService.create_article (duplicate copy).

    Decodes the string flags from the parameter matrix into real arguments,
    mocks the service's ownership/duplicate collaborators, runs
    create_article and asserts the report and saved state.
    """
    article_arg = kwargs.get("article")
    article_duplicate_arg = kwargs.get("article_duplicate")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)

    # tri-state decoding: "none" -> None, otherwise "true"/"false" -> bool
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    raises = EXCEPTIONS.get(raises_arg)

    eissn = "1234-5678"
    pissn = "9876-5432"

    # seed a journal whose title create_article should copy when add_journal_info is on
    if add_journal_info:
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    article = None
    original_id = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(
            eissn=eissn, pissn=pissn, doi="10.123/abc/1",
            fulltext="http://example.com/1")
        # strip journal info so the add_journal_info branch has work to do
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        original_id = article.id

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)

    # install mocks on the service under test
    legit = True if account_arg == "owner" else False
    ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
    self.svc.is_legitimate_owner = ilo_mock

    owned = [eissn, pissn] if account_arg == "owner" else []
    shared = []
    unowned = [eissn] if account_arg == "not_owner" else []
    unmatched = [pissn] if account_arg == "not_owner" else []
    ios_mock = BLLArticleMockFactory.issn_ownership_status(
        owned, shared, unowned, unmatched)
    self.svc.issn_ownership_status = ios_mock

    gd_mock = None
    if article_duplicate_arg == "yes":
        gd_mock = BLLArticleMockFactory.get_duplicate(
            eissn=eissn, pissn=pissn, doi="10.123/abc/1",
            fulltext="http://example.com/1")
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    # record the duplicate the mock will return, for the save/merge checks below
    mock_article = self.svc.get_duplicate(article)

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, duplicate_check, merge_duplicate,
                                    limit_to_account, add_journal_info, dry_run)
    else:
        report = self.svc.create_article(article, account, duplicate_check, merge_duplicate,
                                         limit_to_account, add_journal_info, dry_run)
        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes":
            original = Article.pull(original_id)
            assert original is not None
            assert report["update"] == 0
        elif article is not None:
            original = Article.pull(original_id)
            assert original is None

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None:
            merged = Article.pull(mock_article.id)
            assert merged is None

        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"
def test_01_reject_application(self, name, application, application_status, account, prov, current_journal, note, save, raises=None):
    """Matrix-driven test of ApplicationService.reject_application (duplicate copy).

    Asserts rejected status, journal unlinking, provenance, notes and the
    manual-update timestamp for each parameter combination.
    """
    #######################################
    ## set up
    if save == "fail":
        # NOTE(review): class-level patch is never restored — verify the test
        # harness resets Suggestion.save between tests
        Suggestion.save = mock_save_fail

    ap = None
    journal = None
    if application == "exists":
        ap = Suggestion(**ApplicationFixtureFactory.make_application_source())
        ap.set_application_status(application_status)
        ap.set_id(ap.makeid())
        ap.remove_notes()

        if current_journal == "yes":
            # wire the application and journal to each other
            journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
            journal.set_id(journal.makeid())
            journal.set_current_application(ap.id)
            journal.save(blocking=True)
            ap.set_current_journal(journal.id)
        else:
            ap.remove_current_journal()

    acc = None
    if account == "publisher":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account == "admin":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    provenance = None
    if prov != "none":
        provenance = prov == "true"

    thenote = None
    if note == "yes":
        thenote = "abcdefg"

    ########################################
    ## execute
    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.reject_application(ap, acc, provenance, note=thenote)
    else:
        svc.reject_application(ap, acc, provenance, note=thenote)
        # give the index time to catch up before pulling records back
        time.sleep(1)

        #######################################
        ## Check
        ap2 = Suggestion.pull(ap.id)
        assert ap2 is not None
        assert ap2.application_status == constants.APPLICATION_STATUS_REJECTED
        assert ap2.current_journal is None

        # check the updated and manually updated date are essentially the same (they can theoretically differ
        # by a small amount just based on when they are set)
        updated_spread = abs((ap2.last_updated_timestamp - ap2.last_manual_update_timestamp).total_seconds())
        assert updated_spread <= 1.0

        if current_journal == "yes" and journal is not None:
            # rejection detaches the application and records it as related instead
            j2 = Journal.pull(journal.id)
            assert j2 is not None
            assert j2.current_application is None
            assert ap2.related_journal == j2.id

        if prov == "true":
            pr = Provenance.get_latest_by_resource_id(ap.id)
            assert pr is not None

        if note == "yes":
            assert len(ap2.notes) == 1
            assert ap2.notes[0].get("note") == "abcdefg"
        elif note == "no":
            assert len(ap2.notes) == 0
def test_01_update_request(self, name, journal_id, journal_lock, account, account_role, account_is_owner,
                           current_applications, application_lock, application_status, completed_applications,
                           raises, return_app, return_jlock, return_alock, db_jlock, db_alock, db_app):
    """Matrix-driven test of ApplicationService.update_request_for_journal (duplicate copy).

    Builds a journal plus current/completed applications and optional
    pre-existing locks, then checks the returned (application, journal lock,
    application lock) triple and the resulting lock/application state.

    Fix: removed the redundant unused local ``l = lock.has_lock(...)`` in the
    db_jlock branch, which duplicated the immediately-following asserted call.
    """
    ###############################################
    ## set up

    # create the journal
    journal = None
    jid = None
    if journal_id == "valid":
        journal = Journal(**JournalFixtureFactory.make_journal_source(
            in_doaj=True))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "not_in_doaj":
        journal = Journal(**JournalFixtureFactory.make_journal_source(
            in_doaj=False))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "missing":
        # an id that does not exist in the index
        jid = uuid.uuid4().hex

    acc = None
    if account == "yes":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_role == "none":
            acc.remove_role("publisher")
        elif account_role == "admin":
            acc.remove_role("publisher")
            acc.add_role("admin")
        acc.set_id(acc.makeid())
        if account_is_owner == "yes":
            # adopt the journal owner's id so ownership checks pass
            acc.set_id(journal.owner)

    if journal_lock == "yes":
        # another user already holds the journal lock
        lock.lock("journal", jid, "someoneelse", blocking=True)

    latest_app = None
    current_app_count = int(current_applications)
    for i in range(current_app_count):
        app = Suggestion(
            **ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("198" + str(i) + "-01-01T00:00:00Z")
        app.set_current_journal(jid)
        app.save()
        latest_app = app
        if journal is not None:
            journal.set_current_application(app.id)

    comp_app_count = int(completed_applications)
    for i in range(comp_app_count):
        app = Suggestion(
            **ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("197" + str(i) + "-01-01T00:00:00Z")
        app.set_related_journal(jid)
        app.save()
        if journal is not None:
            journal.add_related_application(app.id, date_accepted=app.created_date)

    if current_app_count == 0 and comp_app_count == 0:
        # save at least one record to initialise the index mapping, otherwise tests fail
        app = Suggestion(
            **ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.save()

    if application_lock == "yes":
        # another user already holds the application lock
        lock.lock("suggestion", latest_app.id, "someoneelse", blocking=True)

    if application_status != "n/a":
        latest_app.set_application_status(application_status)
        latest_app.save(blocking=True)

    # finally save the journal record, ensuring we get a blocking save, so everything
    # above here should be synchronised with the repo
    if journal is not None:
        journal.save(blocking=True)

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.update_request_for_journal(jid, acc)
    else:
        application, jlock, alock = svc.update_request_for_journal(
            jid, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        if return_app == "none":
            assert application is None
        elif return_app == "yes":
            assert application is not None

        if return_jlock == "none":
            assert jlock is None
        elif return_jlock == "yes":
            assert jlock is not None

        if return_alock == "none":
            assert alock is None
        elif return_alock == "yes":
            assert alock is not None

        if db_jlock == "no" and acc is not None:
            assert not lock.has_lock("journal", jid, acc.id)
        elif db_jlock == "yes" and acc is not None:
            assert lock.has_lock("journal", jid, acc.id)

        if db_alock == "no" and application.id is not None and acc is not None:
            assert not lock.has_lock("suggestion", application.id, acc.id)
        elif db_alock == "yes" and application.id is not None and acc is not None:
            assert lock.has_lock("suggestion", application.id, acc.id)

        if db_app == "no" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is None
        elif db_app == "yes" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is not None

        if current_app_count == 0 and comp_app_count == 0 and application is not None:
            # a brand-new update request has no article stats yet
            assert application.article_metadata is None
            assert application.articles_last_year is None
        elif application is not None:
            assert application.article_metadata is not None
            assert application.articles_last_year is not None
if acreated < jcreated: date_accepted = journal.created_date elif jreapp is not None and acreated < jreapp: date_accepted = jreapp_str else: date_accepted = application.created_date journal.add_related_application(application.id, date_accepted) row += [ date_accepted, "yes", "application is in status 'accepted' and refers to the journal as a related journal" ] rows.append(row) journal.save() else: row = [ journal.id, journal.created_date, journal.data.get("last_reapplication", ""), "", "", "", "", "no", "no application refers to this journal as a related journal" ] rows.append(row) print((counter, journal.id)) with open("journal_2_application.csv", "wb") as f: writer = csv.writer(f) header = [ "journal", "journal_created", "journal_reapp", "application",
def test_02_application_2_journal(self, name, application_type, manual_update_arg, app_key_properties, current_journal, raises):
    """Matrix-driven test of ApplicationService.application_2_journal (duplicate copy).

    Checks carry-over of bibjson, notes, contacts, editorial assignments,
    owner and seal from application/current journal, plus the
    last_manual_update behaviour.
    """
    # set up for the test
    #########################################
    cj = None
    # random seal value exercises both outcomes over repeated runs
    has_seal = bool(randint(0, 1))

    application = None
    if application_type == "present":
        # clean application with no key properties pre-set
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.set_id(application.makeid())
        application.remove_contacts()
        application.remove_editor_group()
        application.remove_editor()
        application.remove_owner()
        application.remove_current_journal()
        application.remove_notes()

        if app_key_properties == "yes":
            application.add_contact("Application", "*****@*****.**")
            application.set_editor_group("appeditorgroup")
            application.set_editor("appeditor")
            application.set_owner("appowner")

        application.set_seal(has_seal)
        application.add_note("Application Note")

        if current_journal == "present":
            # journal has its own, distinct key properties
            journal = Journal(**JournalFixtureFactory.make_journal_source())
            journal.remove_contacts()
            journal.add_contact("Journal", "*****@*****.**")
            journal.set_editor_group("journaleditorgroup")
            journal.set_editor("journaleditor")
            journal.set_owner("journalowner")
            journal.remove_current_application()
            journal.remove_notes()
            journal.add_note("Journal Note")
            journal.save(blocking=True)
            application.set_current_journal(journal.id)
            cj = journal
        elif current_journal == "missing":
            # references a journal id that is not in the index
            application.set_current_journal("123456789987654321")

    manual_update = None
    if manual_update_arg == "true":
        manual_update = True
    elif manual_update_arg == "false":
        manual_update = False

    # execute the test
    ########################################
    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.application_2_journal(application, manual_update)
    else:
        journal = svc.application_2_journal(application, manual_update)

        # check the result
        ######################################
        assert journal is not None
        assert isinstance(journal, Journal)
        assert journal.is_in_doaj() is True

        # bibjson must match the application's, apart from the 'active' flag
        jbj = journal.bibjson().data
        del jbj["active"]
        assert jbj == application.bibjson().data

        if current_journal == "present":
            assert len(journal.related_applications) == 3
        else:
            assert len(journal.related_applications) == 1
        related = journal.related_application_record(application.id)
        assert related is not None

        if manual_update_arg == "true":
            assert journal.last_manual_update is not None and journal.last_manual_update != "1970-01-01T00:00:00Z"

        if app_key_properties == "yes":
            # application-supplied key properties take precedence
            contacts = journal.contacts()
            assert len(contacts) == 1
            assert contacts[0].get("name") == "Application"
            assert contacts[0].get("email") == "*****@*****.**"
            assert journal.editor_group == "appeditorgroup"
            assert journal.editor == "appeditor"
            assert journal.owner == "appowner"
            assert journal.has_seal() == has_seal
            if current_journal == "present":
                assert len(journal.notes) == 2
            else:
                assert len(journal.notes) == 1
        elif app_key_properties == "no":
            if current_journal == "present":
                # journal retains its own key properties
                contacts = journal.contacts()
                assert len(contacts) == 1
                assert contacts[0].get("name") == "Journal"
                assert contacts[0].get("email") == "*****@*****.**"
                assert journal.editor_group == "journaleditorgroup"
                assert journal.editor == "journaleditor"
                assert journal.owner == "journalowner"
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 2
            elif current_journal == "none" or current_journal == "missing":
                # no key properties available from either record
                contacts = journal.contacts()
                assert len(contacts) == 0
                assert journal.editor_group is None
                assert journal.editor is None
                assert journal.owner is None
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 1

        if current_journal == "present":
            # the same journal record is updated, not replaced
            assert cj.id == journal.id
            assert cj.created_date == journal.created_date
def test_01_discover_duplicates(self, name, kwargs):
    """Matrix test for ``DOAJ.articleService().discover_duplicates``.

    Builds an owner account, a journal, and a set of pre-indexed articles (from
    IDENTS), then constructs an incoming article whose DOI/fulltext may duplicate
    an indexed one (optionally padded/prefixed/scheme-varied), and asserts which
    duplicates are discovered.

    Fix: the incoming article's eissn read ``"1234=5678"`` (typo, ``=`` for
    ``-``); per the adjacent comment it is meant to match the journal's eissn
    ``"1234-5678"``. The test's own comment notes ISSN matches are not relevant
    to duplicate discovery, so the correction does not alter the assertions.

    :param name: label of the matrix row (unused in the body)
    :param kwargs: matrix row of string-valued control arguments
    """
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up
    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save()

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(
                eissn="1234-5678",
                pissn="9876-5432",
                doi=the_doi,
                fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save(blocking=True)
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234-5678"  # one matching (typo fix: was "1234=5678")
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(
            eissn=eissn,
            pissn=pissn,
            doi=doi,
            fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article)
    else:
        possible_articles = svc.discover_duplicates(article)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles
def test_01_create_article(self, value, kwargs):
    """Matrix test for ``self.svc.create_article``.

    Sets up fixture journals/articles and monkey-patches the article service's
    collaborators (is_legitimate_owner, issn_ownership_status, get_duplicate,
    has_permissions, _prepare_update_admin, _prepare_update_publisher) with
    BLLArticleMockFactory mocks, then calls create_article and checks the
    success/update/new counts in the returned report.

    :param value: label of the matrix row (unused in the body)
    :param kwargs: matrix row of string-valued control arguments
    """
    article_arg = kwargs.get("article")
    account_arg = kwargs.get("account")
    get_duplicate_result_arg = kwargs.get("get_duplicate_result")
    role_arg = kwargs.get("role")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    update_article_id_arg = kwargs.get("update_article_id")
    has_ft_doi_changed_arg = kwargs.get("has_ft_doi_changed_arg")

    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up
    success = int(success_arg)

    has_ft_doi_changed = True if has_ft_doi_changed_arg == "yes" else False

    # tri-state flags: "none" -> None, otherwise "true"/"false" -> bool
    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    raises = EXCEPTIONS.get(raises_arg)

    # identifiers for the article under test and for a distinct "duplicate" article
    eissn = "1234-5678"
    pissn = "9876-5432"
    doi = "10.123/abc/1"
    fulltext = "http://example.com/1"
    another_doi = "10.123/duplicate-1"

    another_eissn = "1111-1111"
    another_pissn = "2222-2222"

    duplicate_id = None
    original_id = None
    update_article_id = None

    if add_journal_info:
        # journal whose title should be copied into the article's journal metadata
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    if get_duplicate_result_arg == 'different':
        # pre-save an article that shares the doi/fulltext but has its own id
        source = ArticleFixtureFactory.make_article_source(
            eissn=another_eissn,
            pissn=another_pissn,
            doi=doi,
            fulltext=fulltext)
        del source["bibjson"]["journal"]
        duplicate = Article(**source)
        duplicate.save()
        duplicate_id = duplicate.id

    article_id_to_upload = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(
            eissn=eissn,
            pissn=pissn,
            doi=doi,
            fulltext=fulltext)
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        article_id_to_upload = article.id

    if get_duplicate_result_arg == "itself":
        # the "duplicate" shares the incoming article's own id
        source = ArticleFixtureFactory.make_article_source(
            eissn=another_eissn,
            pissn=another_pissn,
            doi=doi,
            fulltext=fulltext)
        del source["bibjson"]["journal"]
        duplicate = Article(**source)
        duplicate.set_id(article_id_to_upload)
        duplicate.save()
        duplicate_id = duplicate.id

    if update_article_id_arg != "none":
        # an already-saved original that the incoming article updates
        another_source = ArticleFixtureFactory.make_article_source(
            eissn=eissn,
            pissn=pissn,
            doi=doi,
            fulltext=fulltext)
        original = Article(**another_source)
        original.save(blocking=True)
        original_id = original.id

        if update_article_id_arg == "doi_ft_not_changed":
            article.bibjson().title = "This needs to be updated"
        elif update_article_id_arg == "doi_ft_changed_duplicate":
            article.bibjson().remove_identifiers("doi")
            article.bibjson().add_identifier("doi", another_doi)
        elif update_article_id_arg == "doi_ft_changed_ok":
            article.bibjson().remove_identifiers("doi")
            article.bibjson().add_identifier("doi", "10.1234/updated")
    else:
        update_article_id = None

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)
        legit = True if account_arg == "owner" else False
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
        self.svc.is_legitimate_owner = ilo_mock

        owned = [eissn, pissn] if account_arg == "owner" else []
        shared = []
        unowned = [eissn] if account_arg == "not_owner" else []
        unmatched = [pissn] if account_arg == "not_owner" else []
        ios_mock = BLLArticleMockFactory.issn_ownership_status(
            owned, shared, unowned, unmatched)
        self.svc.issn_ownership_status = ios_mock

        if role_arg == "admin":
            account.set_role("admin")
        account.save()

    if get_duplicate_result_arg == "none":
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    elif get_duplicate_result_arg == "itself":
        gd_mock = BLLArticleMockFactory.get_duplicate(
            eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext,
            given_article_id=original_id)
    elif get_duplicate_result_arg == "different":
        gd_mock = BLLArticleMockFactory.get_duplicate(
            eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext,
            given_article_id=duplicate_id)
    else:
        # anything else -> a mock that raises when consulted
        gd_mock = BLLArticleMockFactory.get_duplicate(
            given_article_id="exception")

    self.svc.get_duplicate = gd_mock

    mock_article = self.svc.get_duplicate(article)

    # permissions: only admins and owning publishers may proceed
    if role_arg == "admin" or (role_arg == "publisher" and account_arg == "owner"):
        has_permissions_mock = BLLArticleMockFactory.has_permissions(True)
    else:
        has_permissions_mock = BLLArticleMockFactory.has_permissions(False)
    self.svc.has_permissions = has_permissions_mock

    prepare_update_admin_mock = BLLArticleMockFactory._prepare_update_admin(
        get_duplicate_result_arg, update_article_id_arg)
    self.svc._prepare_update_admin = prepare_update_admin_mock

    prepare_update_publisher_mock = BLLArticleMockFactory._prepare_update_publisher(
        get_duplicate_result_arg, has_ft_doi_changed)
    self.svc._prepare_update_publisher = prepare_update_publisher_mock

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account,
                                    merge_duplicate=merge_duplicate,
                                    add_journal_info=add_journal_info,
                                    dry_run=dry_run,
                                    update_article_id=original_id)
    else:
        report = self.svc.create_article(article, account,
                                         merge_duplicate=merge_duplicate,
                                         add_journal_info=add_journal_info,
                                         dry_run=dry_run,
                                         update_article_id=original_id)

        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        # NOTE(review): update_article_id is only ever assigned None above, so this
        # first branch can never run — looks like it should test original_id; confirm intent
        if original_saved_arg == "yes" and update_article_id is not None:
            if get_duplicate_result_arg == "itself":
                original = Article.pull(update_article_id)
                assert original is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
        elif original_saved_arg == "yes":
            if get_duplicate_result_arg == "itself":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
            elif get_duplicate_result_arg == "none":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 0, "update: {}".format(report["update"])
                assert report["new"] == 1, "update: {}".format(report["new"])

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None and mock_article.id != original_id:
            # the mocked duplicate must NOT have been persisted
            merged = Article.pull(mock_article.id)
            assert merged is None, "merged: {}".format(merged)

        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"

        if update_article_id_arg == "doi_ft_changed_ok":
            original = Article.pull(original_id)
            assert original is not None
        elif update_article_id_arg == "doi_ft_not_changed":
            original = Article.pull(original_id)
            assert original is not None