def test_has_permissions(self):
    journal_source = JournalFixtureFactory.make_journal_source()
    journal1 = Journal(**journal_source)

    publisher_owner_src = AccountFixtureFactory.make_publisher_source()
    publisher_owner = Account(**publisher_owner_src)
    publisher_stranged_src = AccountFixtureFactory.make_publisher_source()
    publisher_stranged = Account(**publisher_stranged_src)
    admin_src = AccountFixtureFactory.make_managing_editor_source()
    admin = Account(**admin_src)

    journal1.set_owner(publisher_owner)
    journal1.save(blocking=True)

    eissn = journal1.bibjson().get_one_identifier("eissn")
    pissn = journal1.bibjson().get_one_identifier("pissn")

    art_source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn)
    article = Article(**art_source)

    assert self.svc.has_permissions(publisher_stranged, article, False)
    assert self.svc.has_permissions(publisher_owner, article, True)
    assert self.svc.has_permissions(admin, article, True)
    failed_result = self.svc.has_permissions(publisher_stranged, article, True)
    assert failed_result == {'success': 0, 'fail': 1, 'update': 0, 'new': 0, 'shared': [],
                             'unowned': [pissn, eissn],
                             'unmatched': []}, "received: {}".format(failed_result)
def create_edit_cases():
    application_source = ApplicationFixtureFactory.make_application_source()
    account_source = AccountFixtureFactory.make_publisher_source()

    editable_application = Suggestion(**application_source)
    editable_application.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)

    non_editable_application = Suggestion(**application_source)
    non_editable_application.set_application_status(constants.APPLICATION_STATUS_READY)

    owner_account = Account(**deepcopy(account_source))
    owner_account.set_id(editable_application.owner)

    non_owner_publisher = Account(**deepcopy(account_source))

    non_publisher = Account(**deepcopy(account_source))
    non_publisher.remove_role("publisher")

    admin = Account(**deepcopy(account_source))
    admin.add_role("admin")

    return [
        param("no_app_no_account", None, None, raises=exceptions.ArgumentException),
        param("no_app_with_account", None, owner_account, raises=exceptions.ArgumentException),
        param("app_no_account", editable_application, None, raises=exceptions.ArgumentException),
        param("editable_app_owning_account", editable_application, owner_account, expected=True),
        param("editable_app_nonowning_account", editable_application, non_owner_publisher, raises=exceptions.AuthoriseException),
        param("editable_app_non_publisher_account", editable_application, non_publisher, raises=exceptions.AuthoriseException),
        param("editable_app_admin_account", editable_application, admin, expected=True),
        param("non_editable_app_owning_account", non_editable_application, owner_account, raises=exceptions.AuthoriseException),
        param("non_editable_app_nonowning_account", non_editable_application, non_owner_publisher, raises=exceptions.AuthoriseException),
        param("non_editable_app_non_publisher_account", non_editable_application, non_publisher, raises=exceptions.AuthoriseException),
        param("non_editable_app_admin_account", non_editable_application, admin, expected=True)
    ]
def setUp(self):
    super(TestBLLPrepareUpdatePublisher, self).setUp()
    self.svc = DOAJ.articleService()
    self.is_id_updated = self.svc._doi_or_fulltext_updated
    self.has_permission = self.svc.has_permissions
    self.merge = Article.merge
    acc_source = AccountFixtureFactory.make_publisher_source()
    self.publisher = Account(**acc_source)
def decorated_view(*args, **kwargs):
    api_key = request.values.get("api_key", None)
    if api_key is not None:
        user = Account.pull_by_api_key(api_key)
        if user is not None:
            if login_user(user, remember=False):
                return fn(*args, **kwargs)
            # else
    raise Api401Error("An API Key is required to access this.")
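For context, an inner view like this is normally returned from a decorator factory that supplies fn. The sketch below shows that enclosing wrapper; the factory name api_key_required and the use of functools.wraps are assumptions for illustration, not taken from the snippet, while Account, login_user and Api401Error are reused from the snippet's own module.

import functools
from flask import request

def api_key_required(fn):  # hypothetical factory name; its inner function is the snippet above
    @functools.wraps(fn)
    def decorated_view(*args, **kwargs):
        api_key = request.values.get("api_key", None)
        if api_key is not None:
            # Account, login_user and Api401Error are assumed to come from the surrounding module
            user = Account.pull_by_api_key(api_key)
            if user is not None and login_user(user, remember=False):
                return fn(*args, **kwargs)
        raise Api401Error("An API Key is required to access this.")
    return decorated_view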
def load_application_cases():
    account = Account(**AccountFixtureFactory.make_publisher_source())
    account.set_id(account.makeid())

    application = Suggestion(**ApplicationFixtureFactory.make_application_source())
    application.makeid()

    wrong_id = uuid.uuid4()

    return [
        param("a_id_acc_lock", application, application.id, account, True, raises=lock.Locked),
        param("a_id_acc_nolock", application, application.id, account, False),
        param("a_id_noacc_nolock", application, application.id, None, False),
        param("a_noid_noacc_nolock", application, None, None, False, raises=exceptions.ArgumentException),
        param("a_wid_noacc_nolock", application, wrong_id, None, False),
        param("noa_id_noacc_nolock", None, application.id, None, False),
        param("noa_noid_noacc_nolock", None, None, None, False, raises=exceptions.ArgumentException)
    ]
def load_journal_cases():
    account = Account(**AccountFixtureFactory.make_publisher_source())
    account.set_id(account.makeid())

    journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())

    wrong_id = uuid.uuid4()

    return [
        param("j_id_acc_lock", journal, journal.id, account, True, raises=lock.Locked),
        param("j_id_acc_nolock", journal, journal.id, account, False),
        param("j_id_noacc_nolock", journal, journal.id, None, False),
        param("j_noid_noacc_nolock", journal, None, None, False, raises=exceptions.ArgumentException),
        param("j_wid_noacc_nolock", journal, wrong_id, None, False),
        param("noj_id_noacc_nolock", None, journal.id, None, False),
        param("noj_noid_noacc_nolock", None, None, None, False, raises=exceptions.ArgumentException)
    ]
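The two case loaders above return lists of param(...) entries. The sketch below shows how such a list is typically fed to a test via the parameterized package (which supplies param and expand, as the snippets' use of param suggests); the test class name, method name and body are illustrative only.

from parameterized import parameterized

class TestJournalLock(DoajTestCase):  # hypothetical consumer of the case list above

    @parameterized.expand(load_journal_cases())
    def test_journal_lock(self, name, journal, journal_id, account, already_locked, raises=None):
        # each param(...) above unpacks into these arguments; the real body would call the
        # locking service under test and assert against `raises`
        pass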
def publishers_with_journals():
    """ Get accounts for all publishers with journals in the DOAJ """
    for acc in esprit.tasks.scroll(conn, 'account', q=publisher_query):
        account = Account(**acc)
        journal_ids = account.journal
        if journal_ids is not None:
            for j in journal_ids:
                journal = Journal.pull(j)
                if journal is not None and journal.is_in_doaj():
                    yield account
                    break
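A small, hypothetical driver for the generator above; conn and publisher_query are assumed to be defined at module level in the original script, and the Python 2 print statement matches the neighbouring scripts.

if __name__ == "__main__":
    # collect the matching accounts and report a simple count
    publishers = [acc for acc in publishers_with_journals()]
    print "found", len(publishers), "publisher accounts with at least one journal in DOAJ"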
def delete_user(username):
    prefix = "Delete existing user: "
    # the original snippet is partially redacted; this is a minimal reconstruction
    # based on the surviving strings "no such user:" and "deleted"
    acc = Account.pull(username)
    if acc is None:
        print prefix + "no such user: " + username
        return
    acc.delete()
    print prefix + "deleted"
def test_03_view_application(self, name, account_type, role, owner, application_type, raises=None, returns=None, auth_reason=None):
    # set up the objects
    application = None
    if application_type == "exists":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

    account = None
    if account_type == "exists":
        if role == "none":
            account = Account(**AccountFixtureFactory.make_publisher_source())
            account.remove_role("publisher")
        elif role == "publisher":
            account = Account(**AccountFixtureFactory.make_publisher_source())
        elif role == "admin":
            account = Account(**AccountFixtureFactory.make_managing_editor_source())

        if owner == "yes":
            application.set_owner(account.id)

    svc = DOAJ.authorisationService()
    if raises is not None and raises != "":
        exception = None
        with self.assertRaises(EXCEPTIONS[raises]):
            try:
                svc.can_view_application(account, application)
            except Exception as e:
                exception = e
                raise e
        if raises == "AuthoriseException":
            if auth_reason == "not_owner":
                assert exception.reason == exception.NOT_OWNER
            elif auth_reason == "wrong_role":
                assert exception.reason == exception.WRONG_ROLE
    elif returns is not None:
        expected = returns == "true"
        assert svc.can_view_application(account, application) is expected
    else:
        assert False, "Specify either raises or returns"
def decorated_view(*args, **kwargs):
    api_key = request.values.get("api_key", None)
    if api_key:
        user = Account.pull_by_api_key(api_key)
        if user is not None:
            if login_user(user, remember=False):
                return fn(*args, **kwargs)
        # else
        abort(401)
    # no api key, which is ok
    return fn(*args, **kwargs)
def setUp(self):
    super(TestAdminEditMetadata, self).setUp()
    admin_account = Account.make_account(username="******", name="Admin", email="*****@*****.**", roles=["admin"])
    admin_account.set_password('password123')
    admin_account.save()

    publisher_account = Account.make_account(username="******", name="Publisher", email="*****@*****.**", roles=["publisher"])
    publisher_account.set_password('password456')
    publisher_account.save(blocking=True)

    self.j = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    self.j.save(blocking=True)
    self.a = Article(**ArticleFixtureFactory.make_article_source(in_doaj=True))
    self.a.save(blocking=True)
def send_editor_email(journal):
    editor = Account.pull(journal.editor)
    eg = EditorGroup.pull_by_key("name", journal.editor_group)
    url_root = app.config.get("BASE_URL")

    to = [editor.email]
    fro = app.config.get('SYSTEM_EMAIL_FROM', '*****@*****.**')
    subject = app.config.get("SERVICE_NAME", "") + " - new journal assigned to you"

    app_email.send_mail(to=to,
                        fro=fro,
                        subject=subject,
                        template_name="email/journal_assigned_editor.txt",
                        editor=editor.id.encode('utf-8', 'replace'),
                        journal_name=journal.bibjson().title.encode('utf-8', 'replace'),
                        group_name=eg.name.encode("utf-8", "replace"),
                        url_root=url_root)
def load_j2a_cases():
    journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    account_source = AccountFixtureFactory.make_publisher_source()

    owner_account = Account(**deepcopy(account_source))
    owner_account.set_id(journal.owner)

    non_owner_publisher = Account(**deepcopy(account_source))

    non_publisher = Account(**deepcopy(account_source))
    non_publisher.remove_role("publisher")

    admin = Account(**deepcopy(account_source))
    admin.add_role("admin")

    return [
        param("no_journal_no_account", None, None, raises=exceptions.ArgumentException),
        param("no_journal_with_account", None, owner_account, raises=exceptions.ArgumentException),
        param("journal_no_account", journal, None, comparator=application_matches),
        param("journal_matching_account", journal, owner_account, comparator=application_matches),
        param("journal_unmatched_account", journal, non_owner_publisher, raises=exceptions.AuthoriseException),
        param("journal_non_publisher_account", journal, non_publisher, raises=exceptions.AuthoriseException),
        param("journal_admin_account", journal, admin, comparator=application_matches)
    ]
def test_01_accept_application(self, name, application_type, account_type, manual_update, provenance, raises, result_provenance, result_manual_update):
    ###############################################
    ## set up

    # create the application
    application = None
    if application_type == "save_fail":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.save = mock_save
        Journal.save = mock_save
    elif application_type == "with_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.remove_notes()
        application.add_note("unique 1", "2002-01-01T00:00:00Z")
        application.add_note("duplicate", "2001-01-01T00:00:00Z")
        cj = application.current_journal
        journal = Journal(**JournalFixtureFactory.make_journal_source())
        journal.set_id(cj)
        journal.remove_notes()
        journal.add_note("unique 2", "2003-01-01T00:00:00Z")
        journal.add_note("duplicate", "2001-01-01T00:00:00Z")
        journal.save(blocking=True)
    elif application_type == "no_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.remove_current_journal()

    acc = None
    if account_type == "not_allowed":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account_type == "allowed":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    mu = None
    if manual_update in ["true", "false"]:
        mu = manual_update == "true"

    prov = None
    if provenance in ["true", "false"]:
        prov = provenance == "true"

    save = bool(randint(0, 1))

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.accept_application(application, acc, mu, prov)
    else:
        journal = svc.accept_application(application, acc, mu, prov, save_journal=save, save_application=save)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check a few common things
        assert application.application_status == constants.APPLICATION_STATUS_ACCEPTED
        assert application.current_journal is None
        assert journal.current_application is None
        assert application.related_journal == journal.id

        related = journal.related_applications
        if application_type == "with_current_journal":
            assert len(related) == 3
        elif application_type == "no_current_journal":
            assert len(related) == 1
        assert related[0].get("application_id") == application.id
        assert related[0].get("date_accepted") is not None

        if result_manual_update == "yes":
            assert journal.last_manual_update is not None
            assert journal.last_manual_update != "1970-01-01T00:00:00Z"
            assert application.last_manual_update is not None
            assert application.last_manual_update != "1970-01-01T00:00:00Z"
        elif result_manual_update == "no":
            assert journal.last_manual_update is None
            assert application.last_manual_update is None

        if application_type == "with_current_journal":
            assert len(journal.notes) == 3
            notevals = [note.get("note") for note in journal.notes]
            assert "duplicate" in notevals
            assert "unique 1" in notevals
            assert "unique 2" in notevals

        app_prov = Provenance.get_latest_by_resource_id(application.id)
        if result_provenance == "yes":
            assert app_prov is not None
        elif result_provenance == "no":
            assert app_prov is None

        if save:
            pass
from portality.models import Account

if __name__ == "__main__":
    import argparse, getpass
    parser = argparse.ArgumentParser()
    parser.add_argument("-e", "--email", help="email address of user to make admin")
    args = parser.parse_args()

    if not args.email:
        print "Please specify an email with the -e option"
        exit()

    email = args.email

    acc = Account.pull(email)
    if not acc:
        print "Account with email " + email + " does not exist"
        exit()

    current_roles = acc.role
    if "admin" not in current_roles:
        acc.set_role(current_roles + ["admin"])
        acc.save()
def test_01_create_article(self, value, kwargs):

    article_arg = kwargs.get("article")
    account_arg = kwargs.get("account")
    get_duplicate_result_arg = kwargs.get("get_duplicate_result")
    role_arg = kwargs.get("role")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    update_article_id_arg = kwargs.get("update_article_id")
    has_ft_doi_changed_arg = kwargs.get("has_ft_doi_changed_arg")

    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)
    has_ft_doi_changed = True if has_ft_doi_changed_arg == "yes" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    raises = EXCEPTIONS.get(raises_arg)

    eissn = "1234-5678"
    pissn = "9876-5432"
    doi = "10.123/abc/1"
    fulltext = "http://example.com/1"
    another_doi = "10.123/duplicate-1"
    another_eissn = "1111-1111"
    another_pissn = "2222-2222"

    duplicate_id = None
    original_id = None
    update_article_id = None

    if add_journal_info:
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    if get_duplicate_result_arg == 'different':
        source = ArticleFixtureFactory.make_article_source(eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext)
        del source["bibjson"]["journal"]
        duplicate = Article(**source)
        duplicate.save()
        duplicate_id = duplicate.id

    article_id_to_upload = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        article_id_to_upload = article.id

    if get_duplicate_result_arg == "itself":
        source = ArticleFixtureFactory.make_article_source(eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext)
        del source["bibjson"]["journal"]
        duplicate = Article(**source)
        duplicate.set_id(article_id_to_upload)
        duplicate.save()
        duplicate_id = duplicate.id

    if update_article_id_arg != "none":
        another_source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        original = Article(**another_source)
        original.save(blocking=True)
        original_id = original.id

        if update_article_id_arg == "doi_ft_not_changed":
            article.bibjson().title = "This needs to be updated"
        elif update_article_id_arg == "doi_ft_changed_duplicate":
            article.bibjson().remove_identifiers("doi")
            article.bibjson().add_identifier("doi", another_doi)
        elif update_article_id_arg == "doi_ft_changed_ok":
            article.bibjson().remove_identifiers("doi")
            article.bibjson().add_identifier("doi", "10.1234/updated")
    else:
        update_article_id = None

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)
        legit = True if account_arg == "owner" else False
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
        self.svc.is_legitimate_owner = ilo_mock

        owned = [eissn, pissn] if account_arg == "owner" else []
        shared = []
        unowned = [eissn] if account_arg == "not_owner" else []
        unmatched = [pissn] if account_arg == "not_owner" else []
        ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched)
        self.svc.issn_ownership_status = ios_mock

        if role_arg == "admin":
            account.set_role("admin")
        account.save()

    if get_duplicate_result_arg == "none":
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    elif get_duplicate_result_arg == "itself":
        gd_mock = BLLArticleMockFactory.get_duplicate(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext, given_article_id=original_id)
    elif get_duplicate_result_arg == "different":
        gd_mock = BLLArticleMockFactory.get_duplicate(eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext, given_article_id=duplicate_id)
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(given_article_id="exception")
    self.svc.get_duplicate = gd_mock

    mock_article = self.svc.get_duplicate(article)

    if role_arg == "admin" or (role_arg == "publisher" and account_arg == "owner"):
        has_permissions_mock = BLLArticleMockFactory.has_permissions(True)
    else:
        has_permissions_mock = BLLArticleMockFactory.has_permissions(False)
    self.svc.has_permissions = has_permissions_mock

    prepare_update_admin_mock = BLLArticleMockFactory._prepare_update_admin(get_duplicate_result_arg, update_article_id_arg)
    self.svc._prepare_update_admin = prepare_update_admin_mock

    prepare_update_publisher_mock = BLLArticleMockFactory._prepare_update_publisher(get_duplicate_result_arg, has_ft_doi_changed)
    self.svc._prepare_update_publisher = prepare_update_publisher_mock

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, merge_duplicate=merge_duplicate, add_journal_info=add_journal_info, dry_run=dry_run, update_article_id=original_id)
    else:
        report = self.svc.create_article(article, account, merge_duplicate=merge_duplicate, add_journal_info=add_journal_info, dry_run=dry_run, update_article_id=original_id)

        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes" and update_article_id is not None:
            if get_duplicate_result_arg == "itself":
                original = Article.pull(update_article_id)
                assert original is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
        elif original_saved_arg == "yes":
            if get_duplicate_result_arg == "itself":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
            elif get_duplicate_result_arg == "none":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 0, "update: {}".format(report["update"])
                assert report["new"] == 1, "update: {}".format(report["new"])

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None and mock_article.id != original_id:
            merged = Article.pull(mock_article.id)
            assert merged is None, "merged: {}".format(merged)

        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"

        if update_article_id_arg == "doi_ft_changed_ok":
            original = Article.pull(original_id)
            assert original is not None
        elif update_article_id_arg == "doi_ft_not_changed":
            original = Article.pull(original_id)
            assert original is not None
def get_editor_account(self):
    return Account.pull(self.editor)
def test_01_discover_duplicates(self, name, kwargs):

    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save()

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432", doi=the_doi, fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save(blocking=True)
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234=5678"  # one matching
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article)
    else:
        possible_articles = svc.discover_duplicates(article)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles
args = parser.parse_args()

if not args.name:
    print "Please specify a unique third party organisation name with the -n option"
    exit()

name = args.name
cname = args.contactname
cemail = args.contactemail
register = args.register
stats = args.stats
admin = args.admin
token = uuid.uuid4().hex

acc = Account.pull_by_name(name)
if not acc:
    acc = Account()
    acc.set_name(name)

acc.set_contact(cname, cemail)
acc.allow_registry_access(register)
acc.allow_statistics_access(stats)
acc.allow_admin_access(admin)
acc.set_auth_token(token)
acc.save()

print "auth token set to", token
def migrate_contacts(source, batch_size=1000):
    # read in the content
    f = open(source)
    xml = etree.parse(f)
    f.close()
    contacts = xml.getroot()
    print "migrating", str(len(contacts)), "contact records from", source

    # first thing to do is locate all the duplicates in the logins
    record = []
    duplicates = []
    for element in contacts:
        login = element.find("login")
        if login is not None and login.text is not None and login.text != "":
            if login.text in record:
                duplicates.append(login.text)
            record.append(login.text)

    # now go through and load all the user accounts
    batch = []
    for element in contacts:
        login = element.find("login")
        password = element.find("password")
        name = element.find("name")
        email = element.find("email")
        issns = element.findall("issn")

        if login is None or login.text is None or login.text == "":
            print "ERROR: contact without login - providing login"
            if len(issns) == 0:
                # make a random 8 character login name
                login = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(8))
            else:
                # select the first issn
                login = issns[0].text
        else:
            login = login.text

        if password is None or password.text is None or password.text == "":
            print "ERROR: contact without password", login, "- providing one"
            # make a random 8 character password
            password = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(8))
        else:
            password = password.text

        # check to see if this is a duplicate
        if login in duplicates:
            if len(issns) == 0:
                print "INFO: duplicate detected, has no ISSNs, so skipping", login
                continue
            else:
                print "INFO: duplicate detected, with ISSNs, so keeping", login

        a = Account()
        a.set_id(login)
        a.set_password(password)

        if name is not None and name.text is not None and name.text != "":
            a.set_name(name.text)

        if email is not None and email.text is not None and email.text != "":
            a.set_email(email.text)

        for issn in issns:
            if issn is not None and issn.text is not None and issn.text != "":
                jid = _get_journal_id_from_issn(issn.text)
                a.add_journal(jid)

        a.prep()  # prep for saving, since we're not actually going to call save()
        batch.append(a.data)

        if len(batch) >= batch_size:
            Account.bulk(batch, refresh=True)
            del batch[:]

    if len(batch) > 0:
        Account.bulk(batch)
def get_associate_accounts(self):
    accs = []
    for a in self.associates:
        acc = Account.pull(a)
        accs.append(acc)
    return accs
def test_01_delete_application(self, name, application_type, account_type, current_journal, related_journal, raises):

    ###############################################
    ## set up

    # create the test application (if needed), and the associated current_journal and related_journal in suitable states
    application = None
    cj = None
    rj = None
    if application_type == "found" or application_type == "locked":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

        if current_journal == "none":
            application.remove_current_journal()
        elif current_journal == "not_found":
            application.set_current_journal("123456789987654321")
        elif current_journal == "found":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
        elif current_journal == "locked":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
            lock.lock(constants.LOCK_JOURNAL, cj.id, "otheruser")

        if related_journal == "none":
            application.remove_related_journal()
        elif related_journal == "not_found":
            application.set_related_journal("123456789987654321")
        elif related_journal == "found":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
        elif related_journal == "locked":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
            lock.lock(constants.LOCK_JOURNAL, rj.id, "otheruser")

    acc = None
    if account_type != "none":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_type == "not_permitted":
            acc.remove_role("publisher")

    if application_type == "locked":
        thelock = lock.lock(constants.LOCK_APPLICATION, application.id, "otheruser")
        # we can't explicitly block on the lock, but we can halt until we confirm it is saved
        thelock.blockall([(thelock.id, thelock.last_updated)])

    application_id = None
    if application is not None:
        if acc is not None:
            application.set_owner(acc.id)
        application.save(blocking=True)
        application_id = application.id
    elif application_type == "not_found":
        application_id = u"sdjfasofwefkwflkajdfasjd"

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.delete_application(application_id, acc)
        time.sleep(1)
        check_locks(application, cj, rj, acc)
    else:
        svc.delete_application(application_id, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check that no locks remain set for this user
        check_locks(application, cj, rj, acc)

        # check that the application actually is gone
        if application is not None:
            assert Suggestion.pull(application.id) is None

        # check that the current journal no longer has a reference to the application
        if cj is not None:
            cj = Journal.pull(cj.id)
            assert cj.current_application is None

        # check that the related journal has a record that the application was deleted
        if rj is not None:
            rj = Journal.pull(rj.id)
            record = rj.related_application_record(application.id)
            assert "status" in record
            assert record["status"] == "deleted"
def test_01_issn_ownership_status(self, name, kwargs):

    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_eissn_arg = kwargs.get("article_eissn")
    article_pissn_arg = kwargs.get("article_pissn")
    seen_eissn_arg = kwargs.get("seen_eissn")
    seen_pissn_arg = kwargs.get("seen_pissn")
    journal_owner_arg = kwargs.get("journal_owner")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # generate our incoming article
    article = None
    eissn = None
    pissn = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source()
        article = Article(**source)
        article.set_id()

        article.bibjson().remove_identifiers("pissn")
        if article_pissn_arg == "yes":
            pissn = "1234-5678"
            article.bibjson().add_identifier("pissn", pissn)

        article.bibjson().remove_identifiers("eissn")
        if article_eissn_arg == "yes":
            eissn = "9876-5432"
            article.bibjson().add_identifier("eissn", eissn)

    issns = []
    if eissn is not None and pissn is not None and seen_eissn_arg == "yes" and seen_pissn_arg == "yes":
        issns.append((eissn, pissn))
    if eissn is not None and seen_eissn_arg == "yes":
        issns.append((eissn, "4321-9876"))
        issns.append((eissn, None))
    if pissn is not None and seen_pissn_arg == "yes":
        issns.append(("6789-4321", pissn))
        issns.append((None, pissn))

    owners = []
    if journal_owner_arg == "none":
        owners = [None]
    elif journal_owner_arg == "correct" and owner_id is not None:
        owners = [owner_id]
    elif journal_owner_arg == "incorrect":
        owners = ["randomowner"]
    elif journal_owner_arg == "mix" and owner_id is not None:
        owners.append(owner_id)
        owners.append("randomowner")
        owners.append(None)

    mock = ModelJournalMockFactory.find_by_issn(issns, owners)
    Journal.find_by_issn = mock

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.issn_ownership_status(article, owner_id)
    else:
        owned, shared, unowned, unmatched = svc.issn_ownership_status(article, owner_id)

        owned_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["correct"]:
            assert eissn in owned
            owned_count += 1
        elif eissn is not None:
            assert eissn not in owned

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["correct"]:
            assert pissn in owned
            owned_count += 1
        elif pissn is not None:
            assert pissn not in owned

        assert len(owned) == owned_count

        shared_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["mix"]:
            assert eissn in shared
            shared_count += 1
        elif eissn is not None:
            assert eissn not in shared

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["mix"]:
            assert pissn in shared
            shared_count += 1
        elif pissn is not None:
            assert pissn not in shared

        assert len(shared) == shared_count

        unowned_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["incorrect", "none"]:
            assert eissn in unowned
            unowned_count += 1
        elif eissn is not None:
            assert eissn not in unowned

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["incorrect", "none"]:
            assert pissn in unowned
            unowned_count += 1
        elif pissn is not None:
            assert pissn not in unowned

        assert len(unowned) == unowned_count

        unmatched_count = 0
        if seen_eissn_arg == "no" and eissn is not None:
            assert eissn in unmatched
            unmatched_count += 1
        elif eissn is not None:
            assert eissn not in unmatched

        if seen_pissn_arg == "no" and pissn is not None:
            assert pissn in unmatched
            unmatched_count += 1
        elif pissn is not None:
            assert pissn not in unmatched

        assert len(unmatched) == unmatched_count
def create_account_on_suggestion_approval(suggestion, journal):
    o = Account.pull(suggestion.owner)
    if o:
        flash('Account {username} already exists, so simply associating the new journal with it.'.format(username=o.id), 'success')
        o.add_journal(journal.id)
        if not o.has_role('publisher'):
            o.add_role('publisher')
        o.save()
        return o

    suggestion_contact = util.listpop(suggestion.contacts())
    if not suggestion_contact.get('email'):
        msg = ERROR_MSG_TEMPLATE.format(username=o.id, missing_thing='journal contact email in the application')
        app.logger.error(msg)
        flash(msg)
        return o

    send_info_to = suggestion_contact.get('email')
    o = Account.make_account(
        suggestion.owner,
        name=suggestion_contact.get('name'),
        email=send_info_to,
        roles=['publisher'],
        associated_journal_ids=[journal.id]
    )
    o.save()

    url_root = request.url_root
    if url_root.endswith("/"):
        url_root = url_root[:-1]

    if not o.reset_token:
        msg = ERROR_MSG_TEMPLATE.format(username=o.id, missing_thing='reset token')
        app.logger.error(msg)
        flash(msg)
        return o

    reset_url = url_root + url_for('account.reset', reset_token=o.reset_token)
    forgot_pw_url = url_root + url_for('account.forgot')

    password_create_timeout_seconds = int(app.config.get("PASSWORD_CREATE_TIMEOUT", app.config.get('PASSWORD_RESET_TIMEOUT', 86400) * 14))
    password_create_timeout_days = password_create_timeout_seconds / (60 * 60 * 24)

    to = [send_info_to]
    fro = app.config.get('SYSTEM_EMAIL_FROM', '*****@*****.**')
    subject = app.config.get("SERVICE_NAME", "") + " - account created"

    try:
        if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
            app_email.send_mail(to=to,
                                fro=fro,
                                subject=subject,
                                template_name="email/account_created.txt",
                                reset_url=reset_url,
                                username=o.id,
                                timeout_days=password_create_timeout_days,
                                forgot_pw_url=forgot_pw_url)
            flash('Sent email to ' + send_info_to + ' to tell them about the new account.', 'success')
        else:
            flash('Did not email to ' + send_info_to + ' to tell them about the new account, as publisher emailing is disabled.', 'error')
        if app.config.get('DEBUG', False):
            util.flash_with_url('Debug mode - url for create is <a href="{url}">{url}</a>'.format(url=reset_url))
    except Exception as e:
        magic = str(uuid.uuid1())
        util.flash_with_url('Hm, sending the account creation email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!', 'error')
        if app.config.get('DEBUG', False):
            util.flash_with_url('Debug mode - url for create is <a href="{url}">{url}</a>'.format(url=reset_url))
        app.logger.error(magic + "\n" + repr(e))
        raise e

    flash('Account {username} created'.format(username=o.id), 'success')
    return o
def test_01_update_request(self, name, journal_id, journal_lock, account, account_role, account_is_owner, current_applications, application_lock, application_status, completed_applications, raises, return_app, return_jlock, return_alock, db_jlock, db_alock, db_app):

    ###############################################
    ## set up

    # create the journal
    journal = None
    jid = None
    if journal_id == "valid":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "not_in_doaj":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=False))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "missing":
        jid = uuid.uuid4().hex

    acc = None
    if account == "yes":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_role == "none":
            acc.remove_role("publisher")
        elif account_role == "admin":
            acc.remove_role("publisher")
            acc.add_role("admin")
        acc.set_id(acc.makeid())
        if account_is_owner == "yes":
            acc.set_id(journal.owner)

    if journal_lock == "yes":
        lock.lock("journal", jid, "someoneelse", blocking=True)

    latest_app = None
    current_app_count = int(current_applications)
    for i in range(current_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("198" + str(i) + "-01-01T00:00:00Z")
        app.set_current_journal(jid)
        app.save()
        latest_app = app
        if journal is not None:
            journal.set_current_application(app.id)

    comp_app_count = int(completed_applications)
    for i in range(comp_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("197" + str(i) + "-01-01T00:00:00Z")
        app.set_related_journal(jid)
        app.save()
        if journal is not None:
            journal.add_related_application(app.id, date_accepted=app.created_date)

    if current_app_count == 0 and comp_app_count == 0:
        # save at least one record to initialise the index mapping, otherwise tests fail
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.save()

    if application_lock == "yes":
        lock.lock("suggestion", latest_app.id, "someoneelse", blocking=True)

    if application_status != "n/a":
        latest_app.set_application_status(application_status)
        latest_app.save(blocking=True)

    # finally save the journal record, ensuring we get a blocking save, so everything
    # above here should be synchronised with the repo
    if journal is not None:
        journal.save(blocking=True)

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.update_request_for_journal(jid, acc)
    else:
        application, jlock, alock = svc.update_request_for_journal(jid, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        if return_app == "none":
            assert application is None
        elif return_app == "yes":
            assert application is not None

        if return_jlock == "none":
            assert jlock is None
        elif return_jlock == "yes":
            assert jlock is not None

        if return_alock == "none":
            assert alock is None
        elif return_alock == "yes":
            assert alock is not None

        if db_jlock == "no" and acc is not None:
            assert not lock.has_lock("journal", jid, acc.id)
        elif db_jlock == "yes" and acc is not None:
            l = lock.has_lock("journal", jid, acc.id)
            assert lock.has_lock("journal", jid, acc.id)

        if db_alock == "no" and application.id is not None and acc is not None:
            assert not lock.has_lock("suggestion", application.id, acc.id)
        elif db_alock == "yes" and application.id is not None and acc is not None:
            assert lock.has_lock("suggestion", application.id, acc.id)

        if db_app == "no" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is None
        elif db_app == "yes" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is not None

        if current_app_count == 0 and comp_app_count == 0 and application is not None:
            assert application.article_metadata is None
            assert application.articles_last_year is None
        elif application is not None:
            assert application.article_metadata is not None
            assert application.articles_last_year is not None
def test_01_is_legitimate_owner(self, name, kwargs):

    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_eissn_arg = kwargs.get("article_eissn")
    article_pissn_arg = kwargs.get("article_pissn")
    seen_eissn_arg = kwargs.get("seen_eissn")
    seen_pissn_arg = kwargs.get("seen_pissn")
    journal_owner_arg = kwargs.get("journal_owner")

    raises_arg = kwargs.get("raises")
    legit_arg = kwargs.get("legit")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # generate our incoming article
    article = None
    eissn = None
    pissn = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source()
        article = Article(**source)
        article.set_id()

        article.bibjson().remove_identifiers("pissn")
        if article_pissn_arg == "yes":
            pissn = "1234-5678"
            article.bibjson().add_identifier("pissn", pissn)

        article.bibjson().remove_identifiers("eissn")
        if article_eissn_arg == "yes":
            eissn = "9876-5432"
            article.bibjson().add_identifier("eissn", eissn)

    # assemble the issns that will appear to be in the index. One that is irrelevant, and just
    # serves to be "noise" in the database, and the other that matches the spec required by
    # the test
    issns = [("1111-1111", "2222-2222")]
    if eissn is not None and pissn is not None and seen_eissn_arg == "yes" and seen_pissn_arg == "yes":
        issns.append((eissn, pissn))
    if eissn is not None and seen_eissn_arg == "yes":
        issns.append((eissn, None))
    if pissn is not None and seen_pissn_arg == "yes":
        issns.append((None, pissn))

    owners = []
    if journal_owner_arg == "none":
        owners = [None]
    elif journal_owner_arg == "correct" and owner_id is not None:
        owners = [owner_id]
    elif journal_owner_arg == "incorrect":
        owners = ["randomowner"]
    elif journal_owner_arg == "mix" and owner_id is not None:
        owners.append(owner_id)
        owners.append("randomowner")
        owners.append(None)

    mock = ModelJournalMockFactory.find_by_issn(issns, owners)
    Journal.find_by_issn = mock

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.is_legitimate_owner(article, owner_id)
    else:
        legit = svc.is_legitimate_owner(article, owner_id)

        if legit_arg == "no":
            assert legit is False
        elif legit_arg == "yes":
            assert legit is True
"--password", help="password for the new or existing user. If omitted, you will be prompted for one on the next line", ) parser.add_argument("-r", "--role", help="comma separated list of roles to be held by this account") args = parser.parse_args() if not args.username: print "Please specify a username with the -u option" exit() if not args.role: print "WARNING: no role specified, so this user won't be able to do anything" username = args.username email = args.email password = None roles = [r.strip() for r in args.role.split(",")] if args.role is not None else [] if args.password: password = args.password else: password = input_password() acc = Account.pull(username) if not acc: acc = Account({"id": username, "email": email}) acc.set_role(roles) acc.set_password(password) acc.save()
def journals_applications_provenance(outfile_applications, outfile_accounts, outfile_reapps, conn):
    with codecs.open(outfile_applications, "wb", "utf-8") as f, codecs.open(outfile_accounts, "wb", "utf-8") as g, codecs.open(outfile_reapps, "wb", "utf-8") as h:

        out_applications = csv.writer(f)
        out_applications.writerow(["Journal ID", "Journal Created", "Journal Reapplied", "Application ID",
                                   "Application Last Updated", "Application Status", "Published Diff",
                                   "Latest Edit Recorded", "Latest Accepted Recorded"])

        out_accounts = csv.writer(g)
        out_accounts.writerow(["Journal ID", "Journal Created", "Journal Reapplied", "In DOAJ", "Missing Account ID"])

        out_reapps = csv.writer(h)
        out_reapps.writerow(["Journal ID", "Journal Created", "Journal Reapplied", "Application ID",
                             "Application Created", "Application Last Updated", "Application Last Manual Update",
                             "Application Status", "Published Diff"])

        counter = 0
        for result in esprit.tasks.scroll(conn, "journal", keepalive="45m"):
            counter += 1
            journal = Journal(**result)
            print counter, journal.id

            # first figure out if there is a broken related application
            issns = journal.bibjson().issns()
            applications = Suggestion.find_by_issn(issns)
            latest = None
            for application in applications:
                if latest is None:
                    latest = application
                if application.last_updated_timestamp > latest.last_updated_timestamp:
                    latest = application

            if latest is None:
                continue

            jcreated = journal.created_timestamp
            reapp = journal.last_update_request
            print counter, journal.id, reapp
            if reapp is not None:
                jcreated = datetime.strptime(reapp, "%Y-%m-%dT%H:%M:%SZ")
            jcreated = adjust_timestamp(jcreated, JOURNAL_TIMEZONE_CUTOFF)

            app_lustamp = adjust_timestamp(latest.last_updated_timestamp, APP_TIMEZONE_CUTOFF)
            # app_man_lustamp = latest.last_manual_update_timestamp  # no need to adjust this one
            app_man_lustamp = adjust_timestamp(latest.last_manual_update_timestamp, APP_TIMEZONE_CUTOFF)
            td = jcreated - app_lustamp
            mtd = jcreated - app_man_lustamp
            diff = td.total_seconds()
            mdiff = mtd.total_seconds()

            # was the journal created after the application by greater than the threshold?
            if diff > THRESHOLD:
                last_edit = ""
                last_accept = ""

                edit_query = deepcopy(PROV_QUERY)
                edit_query["query"]["bool"]["must"][0]["term"]["resource_id.exact"] = latest.id
                edit_query["query"]["bool"]["must"][1]["term"]["action.exact"] = "edit"
                provs = Provenance.q2obj(q=edit_query)
                if len(provs) > 0:
                    last_edit = provs[0].last_updated

                accept_query = deepcopy(PROV_QUERY)
                accept_query["query"]["bool"]["must"][0]["term"]["resource_id.exact"] = latest.id
                accept_query["query"]["bool"]["must"][1]["term"]["action.exact"] = "status:accepted"
                provs = Provenance.q2obj(q=accept_query)
                if len(provs) > 0:
                    last_accept = provs[0].last_updated

                out_applications.writerow([journal.id, journal.created_date, journal.last_update_request,
                                           latest.id, latest.last_updated, latest.application_status,
                                           diff, last_edit, last_accept])

            # was the journal (in doaj) created before the application by greater than the threshold, and is it in a state other than rejected
            if mdiff < -1 * THRESHOLD and latest.application_status != constants.APPLICATION_STATUS_REJECTED and journal.is_in_doaj():
                out_reapps.writerow([journal.id, journal.created_date, journal.last_update_request,
                                     latest.id, latest.created_date, latest.last_updated, latest.last_manual_update,
                                     latest.application_status, mdiff])

            # now figure out if the account is missing
            owner = journal.owner
            if owner is None:
                out_accounts.writerow([journal.id, journal.created_date, journal.last_update_request, str(journal.is_in_doaj()), "NO OWNER"])
            else:
                acc = Account.pull(owner)
                if acc is None:
                    out_accounts.writerow([journal.id, journal.created_date, journal.last_update_request, str(journal.is_in_doaj()), owner])

    print "processed", counter, "journals"
class TestCreateOrUpdateArticle(DoajTestCase): def setUp(self): super(TestCreateOrUpdateArticle, self).setUp() self.publisher = Account() self.publisher.add_role("publisher") self.publisher.save(blocking=True) self.admin = Account() self.admin.add_role("admin") self.admin.save(blocking=True) sources = JournalFixtureFactory.make_many_journal_sources(2, True) self.journal1 = Journal(**sources[0]) self.journal1.set_owner(self.publisher.id) jbib1 = self.journal1.bibjson() jbib1.add_identifier(jbib1.P_ISSN, "1111-1111") jbib1.add_identifier(jbib1.E_ISSN, "2222-2222") self.journal1.save(blocking=True) self.publisher.add_journal(self.journal1) self.journal2 = Journal(**sources[1]) jbib2 = self.journal2.bibjson() jbib2.add_identifier(jbib2.P_ISSN, "1234-5678") jbib2.add_identifier(jbib2.E_ISSN, "9876-5432") self.journal2.save(blocking=True) self.article10 = Article(**ArticleFixtureFactory.make_article_source( pissn="1111-1111", eissn="2222-2222", doi="10.0000/article-10", fulltext="https://www.article10.com")) self.article10.set_id("articleid10") self.article10.save(blocking=True) self.article11 = Article(**ArticleFixtureFactory.make_article_source( pissn="1111-1111", eissn="2222-2222", doi="10.0000/article-11", fulltext="https://www.article11.com")) self.article11.set_id("articleid11") self.article11.save(blocking=True) self.article2 = Article(**ArticleFixtureFactory.make_article_source( pissn="1234-5678", eissn="9876-5432", doi="10.0000/article-2", fulltext="https://www.article2.com")) self.article2.set_id("articleid2") self.article2.save(blocking=True) def tearDown(self): super(TestCreateOrUpdateArticle, self).tearDown() def test_00_no_doi_and_url_changed(self): ba = self.article10.bibjson() ba.title = "Updated Article" # try for admin resp = ArticleService.create_article( self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) assert resp["success"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["update"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["new"] == 0, "expected 1 updated, received: {}".format( resp) assert self.article10.bibjson().title == "Updated Article", "Expected `Updated Article`, received: {}" \ .format(self.article10.bibjson().title) ba.title = "Updated 2nd time" # try for publisher resp = ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) assert resp["success"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["update"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["new"] == 0, "expected 1 updated, received: {}".format( resp) assert self.article10.bibjson().title == "Updated 2nd time", "Expected `Updated 2nd time`, received: {}" \ .format(self.article10.bibjson().title) def test_01_new_doi_new_url(self): ba = self.article10.bibjson() ba.remove_identifiers(ba.DOI) ba.remove_urls(ba.FULLTEXT) ba.add_identifier(ba.DOI, "10.0000/NEW") ba.add_url(ba.FULLTEXT, "https://www.UPDATED.com") #for publisher resp = ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) assert resp["success"] == 1, "expected 1 new, received: {}".format( resp) assert resp["update"] == 0, "expected 1 new, received: {}".format(resp) assert resp["new"] == 1, "expected 1 new, received: {}".format(resp) #for admin resp = ArticleService.create_article( self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) assert resp["success"] == 1, 
"expected 1 new, received: {}".format( resp) assert resp["update"] == 1, "expected 1 new, received: {}".format(resp) assert resp["new"] == 0, "expected 1 new, received: {}".format(resp) def test_02_old_doi_existing_url_admin(self): ba = self.article10.bibjson() ba.remove_urls(ba.FULLTEXT) # check for url from other article owned by the same publisher ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT) # try as a publisher with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) # check for url from other article owned by someone else ba.remove_urls(ba.FULLTEXT) ba.add_url(self.article2.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT) # try as a publisher with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) def test_03_existing_doi_old_url_admin(self): ba = self.article10.bibjson() ba.remove_identifiers(ba.DOI) # check for DOI from other article owned by the same publisher ba.add_identifier(ba.DOI, "10.0000/article-11") # try as a publisher with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) ba.remove_identifiers(ba.DOI) # check for DOI from other article owned by someone else ba.add_identifier(ba.DOI, "10.0000/article-2") # try as a publisher with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(ArticleMergeConflict): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) def test_04_old_doi_new_url(self): ba = self.article10.bibjson() ba.remove_urls(ba.FULLTEXT) ba.add_url("https://updated.com", ba.FULLTEXT) # try as publisher with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin resp = ArticleService.create_article( self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) assert resp["success"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["update"] == 1, "expected 1 updated, received: {}".format( resp) assert resp["new"] == 0, "expected 1 new, received: {}".format(resp) assert self.article10.get_normalised_fulltext( ) == "//updated.com", "expected //updated.com, received: {}".format( self.article10.get_normalised_fulltext()) def test_05_new_doi_old_url(self): ba = self.article10.bibjson() ba.remove_identifiers(ba.DOI) ba.add_identifier(ba.DOI, "10.0000/article-UPDATED") # try as publisher with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), 
account=self.publisher, article=self.article10) # try as an admin resp = ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp) assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp) assert resp["new"] == 0, "expected 0 new, received: {}".format(resp) assert self.article10.get_normalised_doi() == "10.0000/article-UPDATED", "expected 10.0000/article-UPDATED, received: {}".format(self.article10.get_normalised_doi()) def test_06_existing_doi_new_url(self): ba = self.article10.bibjson() ba.remove_urls(ba.FULLTEXT) ba.add_url("https://updated.com", ba.FULLTEXT) # check for doi from other article of the same publisher ba.remove_identifiers(ba.DOI) ba.add_identifier(ba.DOI, self.article11.bibjson().get_one_identifier(ba.DOI)) # try as publisher with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id) def test_07_new_doi_existing_url(self): ba = self.article10.bibjson() ba.remove_urls(ba.FULLTEXT) ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT) # set a new DOI that does not belong to any other article ba.remove_identifiers(ba.DOI) ba.add_identifier(ba.DOI, "10.0000/article-UPDATED") # try as publisher with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), account=self.publisher, article=self.article10) # try as an admin with self.assertRaises(DuplicateArticleException): ArticleService.create_article(self=ArticleService(), account=self.admin, article=self.article10, update_article_id=self.article10.id)
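A reading aid rather than part of the test module: the DOI / fulltext-URL scenarios exercised by TestCreateOrUpdateArticle above, summarised from its own assertions.

# Scenario matrix covered by TestCreateOrUpdateArticle (derived from the tests above;
# "other article" means the DOI or URL already belongs to a different stored article):
#
#   DOI unchanged, URL unchanged        -> update (publisher and admin)                        test_00
#   DOI new,       URL new              -> publisher: new article; admin w/ update id: update  test_01
#   DOI unchanged, URL of other article -> ArticleMergeConflict (both roles)                   test_02
#   DOI of other,  URL unchanged        -> ArticleMergeConflict (both roles)                   test_03
#   DOI unchanged, URL new              -> publisher: DuplicateArticleException; admin: update test_04
#   DOI new,       URL unchanged        -> publisher: DuplicateArticleException; admin: update test_05
#   DOI of other,  URL new              -> DuplicateArticleException (both roles)              test_06
#   DOI new,       URL of other article -> DuplicateArticleException (both roles)              test_07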
def test_01_create_article(self, name, kwargs): article_arg = kwargs.get("article") article_duplicate_arg = kwargs.get("article_duplicate") account_arg = kwargs.get("account") duplicate_check_arg = kwargs.get("duplicate_check") merge_duplicate_arg = kwargs.get("merge_duplicate") limit_to_account_arg = kwargs.get("limit_to_account") dry_run_arg = kwargs.get("dry_run") raises_arg = kwargs.get("raises") success_arg = kwargs.get("success") original_saved_arg = kwargs.get("original_saved") merge_saved_arg = kwargs.get("merge_saved") ############################################### ## set up success = int(success_arg) duplicate_check = None if duplicate_check_arg != "none": duplicate_check = True if duplicate_check_arg == "true" else False merge_duplicate = None if merge_duplicate_arg != "none": merge_duplicate = True if merge_duplicate_arg == "true" else False limit_to_account = None if limit_to_account_arg != "none": limit_to_account = True if limit_to_account_arg == "true" else False dry_run = None if dry_run_arg != "none": dry_run = True if dry_run_arg == "true" else False raises = EXCEPTIONS.get(raises_arg) article = None original_id = None if article_arg == "exists": source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432", doi="10.123/abc/1", fulltext="http://example.com/1") article = Article(**source) article.set_id() original_id = article.id account = None if account_arg != "none": source = AccountFixtureFactory.make_publisher_source() account = Account(**source) legit = True if account_arg == "owner" else False ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit) self.svc.is_legitimate_owner = ilo_mock owned = ["1234-5678", "9876-5432"] if account_arg == "owner" else [] shared = [] unowned = ["1234-5678"] if account_arg == "not_owner" else [] unmatched = ["9876-5432"] if account_arg == "not_owner" else [] ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched) self.svc.issn_ownership_status = ios_mock gd_mock = None if article_duplicate_arg == "yes": gd_mock = BLLArticleMockFactory.get_duplicate(eissn="1234-5678", pissn="9876-5432", doi="10.123/abc/1", fulltext="http://example.com/1") else: gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True) self.svc.get_duplicate = gd_mock mock_article = self.svc.get_duplicate(article) ########################################################### # Execution if raises is not None: with self.assertRaises(raises): self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, dry_run) else: report = self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, dry_run) assert report["success"] == success # check that the article was saved and if it was saved that it was suitably merged if original_saved_arg == "yes": original = Article.pull(original_id) assert original is not None assert report["update"] == 0 elif article is not None: original = Article.pull(original_id) assert original is None if merge_saved_arg == "yes": merged = Article.pull(mock_article.id) assert merged is not None assert report["update"] == 1 elif mock_article is not None: merged = Article.pull(mock_article.id) assert merged is None
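The parameterised article tests in this section repeatedly turn the "none"/"true"/"false" strings from their parameter matrices into None/True/False. The helper below is purely illustrative and is not part of the DOAJ test suite; it only names that repeated pattern.

# Illustrative only -- not part of the DOAJ codebase. Equivalent to the repeated
# 'x = None; if arg != "none": x = True if arg == "true" else False' blocks above.
def tristate(arg):
    """Map a parameter-matrix string onto None, True or False."""
    if arg is None or arg == "none":
        return None
    return arg == "true"

# e.g. duplicate_check = tristate(duplicate_check_arg)
#      merge_duplicate = tristate(merge_duplicate_arg)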
def test_01_update_request(self, name, journal_id, journal_lock, account, account_role, account_is_owner, current_applications, application_lock, application_status, completed_applications, raises, return_app, return_jlock, return_alock, db_jlock, db_alock, db_app): ############################################### ## set up # create the journal journal = None jid = None if journal_id == "valid": journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True)) journal.remove_related_applications() journal.remove_current_application() jid = journal.id elif journal_id == "not_in_doaj": journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=False)) journal.remove_related_applications() journal.remove_current_application() jid = journal.id elif journal_id == "missing": jid = uuid.uuid4().hex acc = None if account == "yes": acc = Account(**AccountFixtureFactory.make_publisher_source()) if account_role == "none": acc.remove_role("publisher") elif account_role == "admin": acc.remove_role("publisher") acc.add_role("admin") acc.set_id(acc.makeid()) if account_is_owner == "yes": acc.set_id(journal.owner) if journal_lock == "yes": lock.lock("journal", jid, "someoneelse", blocking=True) latest_app = None current_app_count = int(current_applications) for i in range(current_app_count): app = Suggestion(**ApplicationFixtureFactory.make_application_source()) app.set_id(app.makeid()) app.set_created("198" + str(i) + "-01-01T00:00:00Z") app.set_current_journal(jid) app.save() latest_app = app if journal is not None: journal.set_current_application(app.id) comp_app_count = int(completed_applications) for i in range(comp_app_count): app = Suggestion(**ApplicationFixtureFactory.make_application_source()) app.set_id(app.makeid()) app.set_created("197" + str(i) + "-01-01T00:00:00Z") app.set_related_journal(jid) app.save() if journal is not None: journal.add_related_application(app.id, date_accepted=app.created_date) if current_app_count == 0 and comp_app_count == 0: # save at least one record to initialise the index mapping, otherwise tests fail app = Suggestion(**ApplicationFixtureFactory.make_application_source()) app.set_id(app.makeid()) app.save() if application_lock == "yes": lock.lock("suggestion", latest_app.id, "someoneelse", blocking=True) if application_status != "n/a": latest_app.set_application_status(application_status) latest_app.save(blocking=True) # finally save the journal record, ensuring we get a blocking save, so everything # above here should be synchronised with the repo if journal is not None: journal.save(blocking=True) ########################################################### # Execution svc = DOAJ.applicationService() if raises != "": with self.assertRaises(EXCEPTIONS[raises]): svc.update_request_for_journal(jid, acc) else: application, jlock, alock = svc.update_request_for_journal(jid, acc) # we need to sleep, so the index catches up time.sleep(1) if return_app == "none": assert application is None elif return_app == "yes": assert application is not None if return_jlock == "none": assert jlock is None elif return_jlock == "yes": assert jlock is not None if return_alock == "none": assert alock is None elif return_alock == "yes": assert alock is not None if db_jlock == "no" and acc is not None: assert not lock.has_lock("journal", jid, acc.id) elif db_jlock == "yes" and acc is not None: assert lock.has_lock("journal", jid, acc.id) if db_alock == "no" and application.id is not None and acc is not None: assert not lock.has_lock("suggestion", application.id, 
acc.id) elif db_alock == "yes" and application.id is not None and acc is not None: assert lock.has_lock("suggestion", application.id, acc.id) if db_app == "no" and application.id is not None: indb = Suggestion.q2obj(q="id.exact:" + application.id) assert indb is None elif db_app == "yes" and application.id is not None: indb = Suggestion.q2obj(q="id.exact:" + application.id) assert indb is not None if current_app_count == 0 and comp_app_count == 0 and application is not None: assert application.article_metadata is None assert application.articles_last_year is None elif application is not None: assert application.article_metadata is not None assert application.articles_last_year is not None
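A standalone note on the created-date values used in the test above; it assumes nothing beyond what the test itself does.

# The current applications are stamped "1980-01-01", "1981-01-01", ... so the one created
# last in the loop is also the newest by created date, which is why latest_app can simply
# be the final loop variable. Lexicographic order of these ISO-8601 strings matches
# chronological order:
created_dates = ["198" + str(i) + "-01-01T00:00:00Z" for i in range(3)]
assert created_dates == ["1980-01-01T00:00:00Z", "1981-01-01T00:00:00Z", "1982-01-01T00:00:00Z"]
assert max(created_dates) == created_dates[-1]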
def test_01_batch_create_article(self, name, kwargs): articles_arg = kwargs.get("articles") duplicate_in_batch_arg = kwargs.get("duplicate_in_batch") duplicate_in_index_arg = kwargs.get("duplicate_in_index") account_arg = kwargs.get("account") duplicate_check_arg = kwargs.get("duplicate_check") merge_duplicate_arg = kwargs.get("merge_duplicate") limit_to_account_arg = kwargs.get("limit_to_account") raises_arg = kwargs.get("raises") success_arg = kwargs.get("success") fail_arg = kwargs.get("fail") update_arg = kwargs.get("update") ############################################### ## set up success = int(success_arg) fail = int(fail_arg) update = int(update_arg) duplicate_in_batch = duplicate_in_batch_arg == "yes" duplicate_in_index = duplicate_in_index_arg == "yes" raises = EXCEPTIONS.get(raises_arg) duplicate_check = None if duplicate_check_arg != "none": duplicate_check = True if duplicate_check_arg == "true" else False merge_duplicate = None if merge_duplicate_arg != "none": merge_duplicate = True if merge_duplicate_arg == "true" else False limit_to_account = None if limit_to_account_arg != "none": limit_to_account = True if limit_to_account_arg == "true" else False account = None if account_arg != "none": source = AccountFixtureFactory.make_publisher_source() account = Account(**source) last_doi = None last_ft = None last_issn = None last_id = None articles = None if articles_arg != "none": articles = [] if articles_arg == "yes": # one with a DOI and no fulltext source = ArticleFixtureFactory.make_article_source( eissn="0000-0000", pissn="0000-0000", doi="10.123/abc/0", fulltext=False) article = Article(**source) article.set_id() articles.append(article) # another with a DOI and no fulltext source = ArticleFixtureFactory.make_article_source( eissn="1111-1111", pissn="1111-1111", doi="10.123/abc/1", fulltext=False) article = Article(**source) article.set_id() articles.append(article) # one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( eissn="2222-2222", pissn="2222-2222", fulltext="http://example.com/2", doi=False) article = Article(**source) article.set_id() articles.append(article) # another one with a fulltext and no DOI source = ArticleFixtureFactory.make_article_source( eissn="3333-3333", pissn="3333-3333", fulltext="http://example.com/3", doi=False) article = Article(**source) article.set_id() articles.append(article) last_issn = "3333-3333" last_doi = "10.123/abc/1" last_ft = "http://example.com/3" last_id = articles[-1].id if duplicate_in_batch: # one with a duplicated DOI source = ArticleFixtureFactory.make_article_source( eissn="4444-4444", pissn="4444-4444", doi="10.123/abc/0", fulltext="http://example.com/4") article = Article(**source) article.set_id() articles.append(article) # one with a duplicated Fulltext source = ArticleFixtureFactory.make_article_source( eissn="5555-5555", pissn="5555-5555", doi="10.123/abc/5", fulltext="http://example.com/1") article = Article(**source) article.set_id() articles.append(article) """ article_count = int(articles_arg) for i in range(article_count): idx = str(i) if duplicate_in_batch: if i < 2: idx = "duplicate" last_issn = str(i) * 4 + "-" + str(i) * 4 last_doi = "10.123/abc/" + idx last_ft = "http://example.com/" + idx source = ArticleFixtureFactory.make_article_source(eissn=last_issn, pissn=last_issn, doi=last_doi, fulltext=last_ft) article = Article(**source) article.set_id() last_id = article.id articles.append(article) """ ilo_mock = None if account_arg == "owner": ilo_mock = 
BLLArticleMockFactory.is_legitimate_owner(legit=True) elif account_arg == "own_1": ilo_mock = BLLArticleMockFactory.is_legitimate_owner( legit_on_issn=[last_issn]) else: ilo_mock = BLLArticleMockFactory.is_legitimate_owner() self.svc.is_legitimate_owner = ilo_mock gd_mock = None if duplicate_in_index: gd_mock = BLLArticleMockFactory.get_duplicate( given_article_id=last_id, eissn=last_issn, pissn=last_issn, doi=last_doi, fulltext=last_ft) else: gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True) self.svc.get_duplicate = gd_mock ios_mock = BLLArticleMockFactory.issn_ownership_status([], [], [], []) self.svc.issn_ownership_status = ios_mock ########################################################### # Execution if raises is not None: with self.assertRaises(raises): try: self.svc.batch_create_articles(articles, account, duplicate_check, merge_duplicate, limit_to_account) except exceptions.IngestException as e: report = e.result assert report["success"] == success assert report["fail"] == fail assert report["update"] == update assert report["new"] == success - update raise else: report = self.svc.batch_create_articles(articles, account, duplicate_check, merge_duplicate, limit_to_account) # make sure all the articles are saved before running the asserts aids = [(a.id, a.last_updated) for a in articles] for aid, lu in aids: Article.block(aid, lu, sleep=0.05) assert report["success"] == success assert report["fail"] == fail assert report["update"] == update assert report["new"] == success - update if success > 0: all_articles = Article.all() if len(all_articles) != success: time.sleep(0.5) all_articles = Article.all() assert len(all_articles) == success else: # there's nothing in the article index with self.assertRaises(ESMappingMissingError): Article.all()
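A standalone sketch of the report invariant asserted above; the counter values are made up and only the keys the test actually checks are shown.

# Every successfully processed article is either a brand-new record or a merge into an
# existing one, so the batch report should always satisfy new == success - update.
report = {"success": 3, "fail": 1, "update": 1, "new": 2}
assert report["new"] == report["success"] - report["update"]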
def test_01_create_article(self, name, kwargs): article_arg = kwargs.get("article") article_duplicate_arg = kwargs.get("article_duplicate") account_arg = kwargs.get("account") duplicate_check_arg = kwargs.get("duplicate_check") merge_duplicate_arg = kwargs.get("merge_duplicate") limit_to_account_arg = kwargs.get("limit_to_account") add_journal_info_arg = kwargs.get("add_journal_info") dry_run_arg = kwargs.get("dry_run") raises_arg = kwargs.get("raises") success_arg = kwargs.get("success") original_saved_arg = kwargs.get("original_saved") merge_saved_arg = kwargs.get("merge_saved") ############################################### ## set up success = int(success_arg) duplicate_check = None if duplicate_check_arg != "none": duplicate_check = True if duplicate_check_arg == "true" else False merge_duplicate = None if merge_duplicate_arg != "none": merge_duplicate = True if merge_duplicate_arg == "true" else False limit_to_account = None if limit_to_account_arg != "none": limit_to_account = True if limit_to_account_arg == "true" else False add_journal_info = None if add_journal_info_arg != "none": add_journal_info = True if add_journal_info_arg == "true" else False dry_run = None if dry_run_arg != "none": dry_run = True if dry_run_arg == "true" else False raises = EXCEPTIONS.get(raises_arg) eissn = "1234-5678" pissn = "9876-5432" if add_journal_info: jsource = JournalFixtureFactory.make_journal_source(in_doaj=True) j = Journal(**jsource) bj = j.bibjson() bj.title = "Add Journal Info Title" bj.remove_identifiers() bj.add_identifier(bj.P_ISSN, pissn) bj.add_identifier(bj.E_ISSN, eissn) j.save(blocking=True) article = None original_id = None if article_arg == "exists": source = ArticleFixtureFactory.make_article_source( eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1") del source["bibjson"]["journal"] article = Article(**source) article.set_id() original_id = article.id account = None if account_arg != "none": source = AccountFixtureFactory.make_publisher_source() account = Account(**source) legit = True if account_arg == "owner" else False ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit) self.svc.is_legitimate_owner = ilo_mock owned = [eissn, pissn] if account_arg == "owner" else [] shared = [] unowned = [eissn] if account_arg == "not_owner" else [] unmatched = [pissn] if account_arg == "not_owner" else [] ios_mock = BLLArticleMockFactory.issn_ownership_status( owned, shared, unowned, unmatched) self.svc.issn_ownership_status = ios_mock gd_mock = None if article_duplicate_arg == "yes": gd_mock = BLLArticleMockFactory.get_duplicate( eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1") else: gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True) self.svc.get_duplicate = gd_mock mock_article = self.svc.get_duplicate(article) ########################################################### # Execution if raises is not None: with self.assertRaises(raises): self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run) else: report = self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run) assert report["success"] == success # check that the article was saved and if it was saved that it was suitably merged if original_saved_arg == "yes": original = Article.pull(original_id) assert original is not None assert report["update"] == 0 elif article is not None: original = Article.pull(original_id) assert original is 
None if merge_saved_arg == "yes": merged = Article.pull(mock_article.id) assert merged is not None assert report["update"] == 1 elif mock_article is not None: merged = Article.pull(mock_article.id) assert merged is None if add_journal_info: assert article.bibjson( ).journal_title == "Add Journal Info Title"
es = Journal.query(q=q) pubs = [term.get("term") for term in es.get("facets", {}).get("publishers", {}).get("terms", [])] if len(pubs) == 0: return None return ", ".join(pubs) fall = open(ALL, "wb") all_writer = csv.writer(fall) all_writer.writerow(["Account ID", "Email", "Name", "Publisher"]) fmulti = open(MULTI, "wb") multi_writer = csv.writer(fmulti) multi_writer.writerow(["Account ID", "Email", "Name", "Publisher"]) print "processing accounts" for a in Account.iterall(): id = a.id name = a.name count = len(a.journal) if a.journal is not None else 0 email = a.email if email is None: continue publisher = None if count > 0: publisher = get_publisher(a) # if they don't publish any journals in the doaj, ignore their account if publisher is None: continue
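get_publisher above relies only on the response exposing facets.publishers.terms, so the query it is handed is assumed to define a terms facet named "publishers". The fragment below is a guess at that shape; the field names are illustrative, not the script's real query, which is built elsewhere.

# Hypothetical illustration only -- the real query `q` is constructed elsewhere in the
# script. All get_publisher needs is a terms facet named "publishers" in the response.
q_example = {
    "query": {"match_all": {}},   # placeholder: the real query restricts to the account's journals
    "size": 0,
    "facets": {
        "publishers": {
            "terms": {"field": "bibjson.publisher.exact"}   # assumed field name
        }
    }
}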
def journals_applications_provenance(outfile_applications, outfile_accounts, outfile_reapps, conn): with codecs.open(outfile_applications, "wb", "utf-8") as f, codecs.open( outfile_accounts, "wb", "utf-8") as g, codecs.open(outfile_reapps, "wb", "utf-8") as h: out_applications = csv.writer(f) out_applications.writerow([ "Journal ID", "Journal Created", "Journal Reapplied", "Application ID", "Application Last Updated", "Application Status", "Published Diff", "Latest Edit Recorded", "Latest Accepted Recorded" ]) out_accounts = csv.writer(g) out_accounts.writerow([ "Journal ID", "Journal Created", "Journal Reapplied", "In DOAJ", "Missing Account ID" ]) out_reapps = csv.writer(h) out_reapps.writerow([ "Journal ID", "Journal Created", "Journal Reapplied", "Application ID", "Application Created", "Application Last Updated", "Application Last Manual Update", "Application Status", "Published Diff" ]) counter = 0 for result in esprit.tasks.scroll(conn, "journal", keepalive="45m"): counter += 1 journal = Journal(**result) print counter, journal.id # first figure out if there is a broken related application issns = journal.bibjson().issns() applications = Suggestion.find_by_issn(issns) latest = None for application in applications: if latest is None: latest = application if application.last_updated_timestamp > latest.last_updated_timestamp: latest = application if latest is None: continue jcreated = journal.created_timestamp reapp = journal.last_update_request print counter, journal.id, reapp if reapp is not None: jcreated = datetime.strptime(reapp, "%Y-%m-%dT%H:%M:%SZ") jcreated = adjust_timestamp(jcreated, JOURNAL_TIMEZONE_CUTOFF) app_lustamp = adjust_timestamp(latest.last_updated_timestamp, APP_TIMEZONE_CUTOFF) # app_man_lustamp = latest.last_manual_update_timestamp # no need to adjust this one app_man_lustamp = adjust_timestamp( latest.last_manual_update_timestamp, APP_TIMEZONE_CUTOFF) td = jcreated - app_lustamp mtd = jcreated - app_man_lustamp diff = td.total_seconds() mdiff = mtd.total_seconds() # was the journal created after the application by greater than the threshold? 
if diff > THRESHOLD: last_edit = "" last_accept = "" edit_query = deepcopy(PROV_QUERY) edit_query["query"]["bool"]["must"][0]["term"][ "resource_id.exact"] = latest.id edit_query["query"]["bool"]["must"][1]["term"][ "action.exact"] = "edit" provs = Provenance.q2obj(q=edit_query) if len(provs) > 0: last_edit = provs[0].last_updated accept_query = deepcopy(PROV_QUERY) accept_query["query"]["bool"]["must"][0]["term"][ "resource_id.exact"] = latest.id accept_query["query"]["bool"]["must"][1]["term"][ "action.exact"] = "status:accepted" provs = Provenance.q2obj(q=accept_query) if len(provs) > 0: last_accept = provs[0].last_updated out_applications.writerow([ journal.id, journal.created_date, journal.last_update_request, latest.id, latest.last_updated, latest.application_status, diff, last_edit, last_accept ]) # was the journal (in doaj) created before the application by greater than the threshold, and is it in a state other than rejected if mdiff < -1 * THRESHOLD and latest.application_status != constants.APPLICATION_STATUS_REJECTED and journal.is_in_doaj( ): out_reapps.writerow([ journal.id, journal.created_date, journal.last_update_request, latest.id, latest.created_date, latest.last_updated, latest.last_manual_update, latest.application_status, mdiff ]) # now figure out if the account is missing owner = journal.owner if owner is None: out_accounts.writerow([ journal.id, journal.created_date, journal.last_update_request, str(journal.is_in_doaj()), "NO OWNER" ]) else: acc = Account.pull(owner) if acc is None: out_accounts.writerow([ journal.id, journal.created_date, journal.last_update_request, str(journal.is_in_doaj()), owner ]) print "processed", counter, "journals"
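journals_applications_provenance deep-copies PROV_QUERY and overwrites must[0].term["resource_id.exact"] and must[1].term["action.exact"], so the template is assumed to be a bool query with two term clauses in that order. The sketch below is that inferred shape, not the real constant defined elsewhere in the script; the sort clause is an assumption based on provs[0] being treated as the most recent provenance record.

# Assumed shape of PROV_QUERY, inferred from how it is mutated above; the real template
# (size, sort, any extra clauses) may differ.
PROV_QUERY_EXAMPLE = {
    "query": {
        "bool": {
            "must": [
                {"term": {"resource_id.exact": "<application id set per record>"}},
                {"term": {"action.exact": "<'edit' or 'status:accepted'>"}}
            ]
        }
    },
    "sort": [{"last_updated": {"order": "desc"}}]   # assumption: newest provenance first
}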
def test_01_delete_application(self, name, application_type, account_type, current_journal, related_journal, raises): ############################################### ## set up # create the test application (if needed), and the associated current_journal and related_journal in suitable states application = None cj = None rj = None if application_type == "found" or application_type == "locked": application = Suggestion( **ApplicationFixtureFactory.make_application_source()) if current_journal == "none": application.remove_current_journal() elif current_journal == "not_found": application.set_current_journal("123456789987654321") elif current_journal == "found": cj = Journal(**JournalFixtureFactory.make_journal_source()) cj.set_id(cj.makeid()) cj.save(blocking=True) application.set_current_journal(cj.id) elif current_journal == "locked": cj = Journal(**JournalFixtureFactory.make_journal_source()) cj.set_id(cj.makeid()) cj.save(blocking=True) application.set_current_journal(cj.id) lock.lock(constants.LOCK_JOURNAL, cj.id, "otheruser") if related_journal == "none": application.remove_related_journal() elif related_journal == "not_found": application.set_related_journal("123456789987654321") elif related_journal == "found": rj = Journal(**JournalFixtureFactory.make_journal_source()) rj.set_id(rj.makeid()) rj.save(blocking=True) application.set_related_journal(rj.id) elif related_journal == "locked": rj = Journal(**JournalFixtureFactory.make_journal_source()) rj.set_id(rj.makeid()) rj.save(blocking=True) application.set_related_journal(rj.id) lock.lock(constants.LOCK_JOURNAL, rj.id, "otheruser") acc = None if account_type != "none": acc = Account(**AccountFixtureFactory.make_publisher_source()) if account_type == "not_permitted": acc.remove_role("publisher") if application_type == "locked": thelock = lock.lock(constants.LOCK_APPLICATION, application.id, "otheruser") # we can't explicitly block on the lock, but we can halt until we confirm it is saved thelock.blockall([(thelock.id, thelock.last_updated)]) application_id = None if application is not None: if acc is not None: application.set_owner(acc.id) application.save(blocking=True) application_id = application.id elif application_type == "not_found": application_id = "sdjfasofwefkwflkajdfasjd" ########################################################### # Execution svc = DOAJ.applicationService() if raises != "": with self.assertRaises(EXCEPTIONS[raises]): svc.delete_application(application_id, acc) time.sleep(1) check_locks(application, cj, rj, acc) else: svc.delete_application(application_id, acc) # we need to sleep, so the index catches up time.sleep(1) # check that no locks remain set for this user check_locks(application, cj, rj, acc) # check that the application actually is gone if application is not None: assert Suggestion.pull(application.id) is None # check that the current journal no longer has a reference to the application if cj is not None: cj = Journal.pull(cj.id) assert cj.current_application is None # check that the related journal has a record that the application was deleted if rj is not None: rj = Journal.pull(rj.id) record = rj.related_application_record(application.id) assert "status" in record assert record["status"] == "deleted"