def test_has_permissions(self):
    """has_permissions: a stranger passes only when ownership is not enforced;
    the journal owner and a managing editor pass either way."""
    journal = Journal(**JournalFixtureFactory.make_journal_source())
    owner = Account(**AccountFixtureFactory.make_publisher_source())
    stranger = Account(**AccountFixtureFactory.make_publisher_source())
    editor = Account(**AccountFixtureFactory.make_managing_editor_source())

    journal.set_owner(owner)
    journal.save(blocking=True)

    jbib = journal.bibjson()
    eissn = jbib.get_one_identifier("eissn")
    pissn = jbib.get_one_identifier("pissn")

    article = Article(**ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn))

    # without ownership enforcement everyone is allowed
    assert self.svc.has_permissions(stranger, article, False)
    # with enforcement, the owner and an admin are allowed
    assert self.svc.has_permissions(owner, article, True)
    assert self.svc.has_permissions(editor, article, True)

    # the stranger, with enforcement, gets a failure report listing the unowned issns
    failed_result = self.svc.has_permissions(stranger, article, True)
    expected = {'success': 0, 'fail': 1, 'update': 0, 'new': 0,
                'shared': [], 'unowned': [pissn, eissn], 'unmatched': []}
    assert failed_result == expected, "received: {}".format(failed_result)
def test_03_view_application(self, name, account_type, role, owner, application_type, raises=None, returns=None, auth_reason=None):
    """Matrix test for AuthorisationService.can_view_application.

    Builds an application and/or an account per the string-valued parameters,
    then asserts either that the expected exception (with the expected
    AuthoriseException reason) is raised, or that the boolean result matches.
    Exactly one of ``raises`` / ``returns`` must be supplied.
    """
    # set up the objects
    application = None
    if application_type == "exists":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

    account = None
    if account_type == "exists":
        if role == "none":
            # a publisher account stripped of its publisher role
            account = Account(**AccountFixtureFactory.make_publisher_source())
            account.remove_role("publisher")
        elif role == "publisher":
            account = Account(**AccountFixtureFactory.make_publisher_source())
        elif role == "admin":
            account = Account(**AccountFixtureFactory.make_managing_editor_source())
        if owner == "yes":
            application.set_owner(account.id)

    svc = DOAJ.authorisationService()
    if raises is not None and raises != "":
        # capture the exception so the AuthoriseException reason can be inspected
        # after the assertRaises context exits
        exception = None
        with self.assertRaises(EXCEPTIONS[raises]):
            try:
                svc.can_view_application(account, application)
            except Exception as e:
                exception = e
                raise e
        if raises == "AuthoriseException":
            if auth_reason == "not_owner":
                assert exception.reason == exception.NOT_OWNER
            elif auth_reason == "wrong_role":
                assert exception.reason == exception.WRONG_ROLE
    elif returns is not None:
        expected = returns == "true"
        assert svc.can_view_application(account, application) is expected
    else:
        assert False, "Specify either raises or returns"
def setUp(self):
    """Build the shared fixtures: a publisher and an admin account, two
    journals (journal1 owned by the publisher, journal2 unowned), two
    articles in journal1 and one in journal2."""
    super(TestCreateOrUpdateArticle, self).setUp()

    self.publisher = Account()
    self.publisher.add_role("publisher")
    self.publisher.save(blocking=True)

    self.admin = Account()
    self.admin.add_role("admin")
    self.admin.save(blocking=True)

    sources = JournalFixtureFactory.make_many_journal_sources(2, True)

    # journal1 belongs to the publisher
    self.journal1 = Journal(**sources[0])
    self.journal1.set_owner(self.publisher.id)
    bib = self.journal1.bibjson()
    bib.add_identifier(bib.P_ISSN, "1111-1111")
    bib.add_identifier(bib.E_ISSN, "2222-2222")
    self.journal1.save(blocking=True)
    self.publisher.add_journal(self.journal1)

    # journal2 has no owner set
    self.journal2 = Journal(**sources[1])
    bib = self.journal2.bibjson()
    bib.add_identifier(bib.P_ISSN, "1234-5678")
    bib.add_identifier(bib.E_ISSN, "9876-5432")
    self.journal2.save(blocking=True)

    # articles: two in journal1, one in journal2
    specs = [
        ("article10", "articleid10", "1111-1111", "2222-2222",
         "10.0000/article-10", "https://www.article10.com"),
        ("article11", "articleid11", "1111-1111", "2222-2222",
         "10.0000/article-11", "https://www.article11.com"),
        ("article2", "articleid2", "1234-5678", "9876-5432",
         "10.0000/article-2", "https://www.article2.com"),
    ]
    for attr, art_id, pissn, eissn, doi, fulltext in specs:
        art = Article(**ArticleFixtureFactory.make_article_source(
            pissn=pissn, eissn=eissn, doi=doi, fulltext=fulltext))
        art.set_id(art_id)
        art.save(blocking=True)
        setattr(self, attr, art)
def setUp(self):
    """Capture references to the real service/model methods so individual
    tests can mock them and later restore the originals."""
    super(TestBLLPrepareUpdatePublisher, self).setUp()
    self.svc = DOAJ.articleService()

    # genuine implementations, kept so tearDown/tests can restore them after patching
    self.is_id_updated = self.svc._doi_or_fulltext_updated
    self.has_permission = self.svc.has_permissions
    self.merge = Article.merge

    self.publisher = Account(**AccountFixtureFactory.make_publisher_source())
def load_j2a_cases():
    """Parameter sets for journal-to-application conversion tests: every
    combination of journal presence and account kind, with the expected
    exception or comparator."""
    journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))

    src = AccountFixtureFactory.make_publisher_source()

    # the account that actually owns the journal
    owner_account = Account(**deepcopy(src))
    owner_account.set_id(journal.owner)

    # a publisher who does not own the journal
    non_owner_publisher = Account(**deepcopy(src))

    # an account with no publisher role at all
    non_publisher = Account(**deepcopy(src))
    non_publisher.remove_role("publisher")

    # an administrator
    admin = Account(**deepcopy(src))
    admin.add_role("admin")

    return [
        param("no_journal_no_account", None, None, raises=exceptions.ArgumentException),
        param("no_journal_with_account", None, owner_account, raises=exceptions.ArgumentException),
        param("journal_no_account", journal, None, comparator=application_matches),
        param("journal_matching_account", journal, owner_account, comparator=application_matches),
        param("journal_unmatched_account", journal, non_owner_publisher, raises=exceptions.AuthoriseException),
        param("journal_non_publisher_account", journal, non_publisher, raises=exceptions.AuthoriseException),
        param("journal_admin_account", journal, admin, comparator=application_matches),
    ]
def publishers_with_journals():
    """ Get accounts for all publishers with journals in the DOAJ """
    for acc in esprit.tasks.scroll(conn, 'account', q=publisher_query):
        account = Account(**acc)
        journal_ids = account.journal
        if journal_ids is None:
            continue
        # yield the account once as soon as any of its journals is in the DOAJ
        for jid in journal_ids:
            journal = Journal.pull(jid)
            if journal is not None and journal.is_in_doaj():
                yield account
                break
def create_user(username, email, password, roles):
    """Create a new account or update an existing one.

    If ``password`` is None the user is prompted interactively via
    ``input_password``.  Roles and password are (re)set in either case,
    and the account is saved.

    NOTE(review): the original source was corrupted at the pull-or-create
    branch (a redacted "******" run); the Account.pull / create fallback
    below is a reconstruction — confirm against version control.
    """
    if password is None:
        password = input_password(username)

    prefix = "Modified existing user: "
    acc = Account.pull(username)
    if acc is None:
        prefix = "Created new user: "
        acc = Account(id=username, email=email)

    acc.set_email(email)
    acc.set_role(roles)
    acc.set_password(password)
    acc.save()
    print(prefix + username)
def load_journal_cases():
    """Parameter sets for journal edit-lock tests: combinations of
    journal / id / account / lock-required, with expected exceptions."""
    account = Account(**AccountFixtureFactory.make_publisher_source())
    account.set_id(account.makeid())

    journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())

    # an id that will not match the journal
    wrong_id = uuid.uuid4()

    return [
        param("j_id_acc_lock", journal, journal.id, account, True, raises=lock.Locked),
        param("j_id_acc_nolock", journal, journal.id, account, False),
        param("j_id_noacc_nolock", journal, journal.id, None, False),
        param("j_noid_noacc_nolock", journal, None, None, False, raises=exceptions.ArgumentException),
        param("j_wid_noacc_nolock", journal, wrong_id, None, False),
        param("noj_id_noacc_nolock", None, journal.id, None, False),
        param("noj_noid_noacc_nolock", None, None, None, False, raises=exceptions.ArgumentException),
    ]
def load_application_cases():
    """Parameter sets for application edit-lock tests: combinations of
    application / id / account / lock-required, with expected exceptions.

    Fix: the original called ``application.makeid()`` and discarded the
    result, leaving the fixture's canned id in place; the sibling
    ``load_journal_cases`` sets the generated id with ``set_id`` — made
    consistent here.
    """
    account = Account(**AccountFixtureFactory.make_publisher_source())
    account.set_id(account.makeid())

    application = Suggestion(**ApplicationFixtureFactory.make_application_source())
    application.set_id(application.makeid())

    # an id that will not match the application
    wrong_id = uuid.uuid4()

    return [
        param("a_id_acc_lock", application, application.id, account, True, raises=lock.Locked),
        param("a_id_acc_nolock", application, application.id, account, False),
        param("a_id_noacc_nolock", application, application.id, None, False),
        param("a_noid_noacc_nolock", application, None, None, False, raises=exceptions.ArgumentException),
        param("a_wid_noacc_nolock", application, wrong_id, None, False),
        param("noa_id_noacc_nolock", None, application.id, None, False),
        param("noa_noid_noacc_nolock", None, None, None, False, raises=exceptions.ArgumentException),
    ]
def test_01_is_legitimate_owner(self, name, kwargs):
    """Matrix test for ArticleService.is_legitimate_owner.

    kwargs (all string-valued flags from the parameter matrix) control the
    presence of the owner account, which ISSNs the article carries, which
    ISSNs appear to already be known to the index, and who owns the
    journals those ISSNs belong to.  Expects either an exception or a
    specific boolean result.
    """
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_eissn_arg = kwargs.get("article_eissn")
    article_pissn_arg = kwargs.get("article_pissn")
    seen_eissn_arg = kwargs.get("seen_eissn")
    seen_pissn_arg = kwargs.get("seen_pissn")
    journal_owner_arg = kwargs.get("journal_owner")
    raises_arg = kwargs.get("raises")
    legit_arg = kwargs.get("legit")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # generate our incoming article
    article = None
    eissn = None
    pissn = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source()
        article = Article(**source)
        article.set_id()

        # strip the fixture identifiers, then add back only those the matrix asks for
        article.bibjson().remove_identifiers("pissn")
        if article_pissn_arg == "yes":
            pissn = "1234-5678"
            article.bibjson().add_identifier("pissn", pissn)

        article.bibjson().remove_identifiers("eissn")
        if article_eissn_arg == "yes":
            eissn = "9876-5432"
            article.bibjson().add_identifier("eissn", eissn)

    # assemble the issns that will appear to be in the index.  One that is
    # irrelevant, and just serves to be "noise" in the database, and the other
    # that matches the spec required by the test
    issns = [("1111-1111", "2222-2222")]
    if eissn is not None and pissn is not None and seen_eissn_arg == "yes" and seen_pissn_arg == "yes":
        issns.append((eissn, pissn))
    if eissn is not None and seen_eissn_arg == "yes":
        issns.append((eissn, None))
    if pissn is not None and seen_pissn_arg == "yes":
        issns.append((None, pissn))

    # the owners that the mocked journal lookup will report for those issns
    owners = []
    if journal_owner_arg == "none":
        owners = [None]
    elif journal_owner_arg == "correct" and owner_id is not None:
        owners = [owner_id]
    elif journal_owner_arg == "incorrect":
        owners = ["randomowner"]
    elif journal_owner_arg == "mix" and owner_id is not None:
        owners.append(owner_id)
        owners.append("randomowner")
        owners.append(None)

    # patch Journal.find_by_issn so no index is needed
    mock = ModelJournalMockFactory.find_by_issn(issns, owners)
    Journal.find_by_issn = mock

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.is_legitimate_owner(article, owner_id)
    else:
        legit = svc.is_legitimate_owner(article, owner_id)
        if legit_arg == "no":
            assert legit is False
        elif legit_arg == "yes":
            assert legit is True
def migrate_contacts(source, batch_size=1000):
    """Load contact records from an XML export into Account objects and
    bulk-index them in batches of ``batch_size``.

    NOTE(review): this is Python 2 code (print statements) — it predates
    the rest of the codebase's Python 3 style.  Missing logins/passwords
    are replaced with random 8-character strings; duplicate logins are
    kept only when they carry ISSNs.
    """
    # read in the content
    f = open(source)
    xml = etree.parse(f)
    f.close()
    contacts = xml.getroot()
    print "migrating", str(len(contacts)), "contact records from", source

    # first thing to do is locate all the duplicates in the logins
    record = []
    duplicates = []
    for element in contacts:
        login = element.find("login")
        if login is not None and login.text is not None and login.text != "":
            if login.text in record:
                duplicates.append(login.text)
            record.append(login.text)

    # now go through and load all the user accounts
    batch = []
    for element in contacts:
        login = element.find("login")
        password = element.find("password")
        name = element.find("name")
        email = element.find("email")
        issns = element.findall("issn")

        if login is None or login.text is None or login.text == "":
            print "ERROR: contact without login - providing login"
            if len(issns) == 0:
                # make a random 8 character login name
                login = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(8))
            else:
                # select the first issn
                login = issns[0].text
        else:
            login = login.text

        if password is None or password.text is None or password.text == "":
            print "ERROR: contact without password", login, "- providing one"
            # make a random 8 character password
            password = ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(8))
        else:
            password = password.text

        # check to see if this is a duplicate
        if login in duplicates:
            if len(issns) == 0:
                print "INFO: duplicate detected, has no ISSNs, so skipping", login
                continue
            else:
                print "INFO: duplicate detected, with ISSNs, so keeping", login

        a = Account()
        a.set_id(login)
        a.set_password(password)
        if name is not None and name.text is not None and name.text != "":
            a.set_name(name.text)
        if email is not None and email.text is not None and email.text != "":
            a.set_email(email.text)
        for issn in issns:
            if issn is not None and issn.text is not None and issn.text != "":
                jid = _get_journal_id_from_issn(issn.text)
                a.add_journal(jid)

        a.prep()  # prep for saving, since we're not actually going to call save()
        batch.append(a.data)
        if len(batch) >= batch_size:
            Account.bulk(batch, refresh=True)
            del batch[:]

    # flush any remainder after the loop
    if len(batch) > 0:
        Account.bulk(batch)
def test_01_delete_application(self, name, application_type, account_type, current_journal, related_journal, raises):
    """Matrix test for ApplicationService.delete_application.

    Builds an application (optionally locked) with current/related journals
    in the state named by the parameters, then asserts either the expected
    exception, or that the application is deleted, locks are released, the
    current journal's reference is removed and the related journal records
    the deletion.
    """
    ###############################################
    ## set up
    # create the test application (if needed), and the associated
    # current_journal and related_journal in suitable states
    application = None
    cj = None
    rj = None
    if application_type == "found" or application_type == "locked":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())

        if current_journal == "none":
            application.remove_current_journal()
        elif current_journal == "not_found":
            application.set_current_journal("123456789987654321")
        elif current_journal == "found":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
        elif current_journal == "locked":
            cj = Journal(**JournalFixtureFactory.make_journal_source())
            cj.set_id(cj.makeid())
            cj.save(blocking=True)
            application.set_current_journal(cj.id)
            # lock held by a different user
            lock.lock(constants.LOCK_JOURNAL, cj.id, "otheruser")

        if related_journal == "none":
            application.remove_related_journal()
        elif related_journal == "not_found":
            application.set_related_journal("123456789987654321")
        elif related_journal == "found":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
        elif related_journal == "locked":
            rj = Journal(**JournalFixtureFactory.make_journal_source())
            rj.set_id(rj.makeid())
            rj.save(blocking=True)
            application.set_related_journal(rj.id)
            lock.lock(constants.LOCK_JOURNAL, rj.id, "otheruser")

    acc = None
    if account_type != "none":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_type == "not_permitted":
            acc.remove_role("publisher")

    if application_type == "locked":
        thelock = lock.lock(constants.LOCK_APPLICATION, application.id, "otheruser")
        # we can't explicitly block on the lock, but we can halt until we confirm it is saved
        thelock.blockall([(thelock.id, thelock.last_updated)])

    application_id = None
    if application is not None:
        if acc is not None:
            application.set_owner(acc.id)
        application.save(blocking=True)
        application_id = application.id
    elif application_type == "not_found":
        application_id = "sdjfasofwefkwflkajdfasjd"

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.delete_application(application_id, acc)
        time.sleep(1)
        check_locks(application, cj, rj, acc)
    else:
        svc.delete_application(application_id, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check that no locks remain set for this user
        check_locks(application, cj, rj, acc)

        # check that the application actually is gone
        if application is not None:
            assert Suggestion.pull(application.id) is None

        # check that the current journal no longer has a reference to the application
        if cj is not None:
            cj = Journal.pull(cj.id)
            assert cj.current_application is None

        # check that the related journal has a record that the application was deleted
        if rj is not None:
            rj = Journal.pull(rj.id)
            record = rj.related_application_record(application.id)
            assert "status" in record
            assert record["status"] == "deleted"
"-r", "--role", help="comma separated list of roles to be held by this account") args = parser.parse_args() if not args.username: print "Please specify a username with the -u option" exit() if not args.role: print "WARNING: no role specified, so this user won't be able to do anything" username = args.username email = args.email password = None roles = [r.strip() for r in args.role.split(",")] if args.role is not None else [] if args.password: password = args.password else: password = input_password() acc = Account.pull(username) if not acc: acc = Account(id=username, email=email) acc.set_role(roles) acc.set_password(password) acc.save()
def test_01_batch_create_article(self, name, kwargs):
    """Matrix test for ArticleService.batch_create_articles.

    kwargs (string-valued flags) control the batch contents (including
    in-batch and in-index duplicates), the account and its ownership, and
    the service flags.  Ownership/duplicate detection are mocked; expects
    either an IngestException carrying a report, or a success report with
    the given success/fail/update counts.
    """
    articles_arg = kwargs.get("articles")
    duplicate_in_batch_arg = kwargs.get("duplicate_in_batch")
    duplicate_in_index_arg = kwargs.get("duplicate_in_index")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    add_journal_info_arg = kwargs.get("add_journal_info")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    fail_arg = kwargs.get("fail")
    update_arg = kwargs.get("update")

    ###############################################
    ## set up

    success = int(success_arg)
    fail = int(fail_arg)
    update = int(update_arg)

    duplicate_in_batch = duplicate_in_batch_arg == "yes"
    duplicate_in_index = int(duplicate_in_index_arg)

    raises = EXCEPTIONS.get(raises_arg)

    # tri-state flags: None when "none", else the parsed boolean
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)

    journal_specs = []
    last_doi = None
    last_ft = None
    last_issn = None
    last_id = None

    articles = None
    if articles_arg != "none":
        articles = []
        if articles_arg == "yes":
            # one with a DOI and no fulltext
            source = ArticleFixtureFactory.make_article_source(
                eissn="0000-0000", pissn="0000-0000", doi="10.123/abc/0", fulltext=False)
            del source["bibjson"]["journal"]
            article = Article(**source)
            article.set_id()
            articles.append(article)
            if add_journal_info:
                journal_specs.append({"title": "0", "pissn": "0000-0000", "eissn": "0000-0000"})

            # another with a DOI and no fulltext
            source = ArticleFixtureFactory.make_article_source(
                eissn="1111-1111", pissn="1111-1111", doi="10.123/abc/1", fulltext=False)
            del source["bibjson"]["journal"]
            article = Article(**source)
            article.set_id()
            articles.append(article)
            if add_journal_info:
                journal_specs.append({"title": "1", "pissn": "1111-1111", "eissn": "1111-1111"})

            # one with a fulltext and no DOI
            source = ArticleFixtureFactory.make_article_source(
                eissn="2222-2222", pissn="2222-2222", fulltext="http://example.com/2", doi=False)
            del source["bibjson"]["journal"]
            article = Article(**source)
            article.set_id()
            articles.append(article)
            if add_journal_info:
                journal_specs.append({"title": "2", "pissn": "2222-2222", "eissn": "2222-2222"})

            # another one with a fulltext and no DOI
            source = ArticleFixtureFactory.make_article_source(
                eissn="3333-3333", pissn="3333-3333", fulltext="http://example.com/3", doi=False)
            del source["bibjson"]["journal"]
            article = Article(**source)
            article.set_id()
            articles.append(article)
            if add_journal_info:
                journal_specs.append({"title": "3", "pissn": "3333-3333", "eissn": "3333-3333"})

            # remembered for the duplicate/ownership mocks below
            last_issn = "3333-3333"
            last_doi = "10.123/abc/1"
            last_ft = "http://example.com/3"
            last_id = articles[-1].id

            if duplicate_in_batch:
                # one with a duplicated DOI
                source = ArticleFixtureFactory.make_article_source(
                    eissn="4444-4444", pissn="4444-4444", doi="10.123/abc/0",
                    fulltext="http://example.com/4")
                del source["bibjson"]["journal"]
                article = Article(**source)
                article.set_id()
                articles.append(article)
                if add_journal_info:
                    journal_specs.append({"title": "4", "pissn": "4444-4444", "eissn": "4444-4444"})

                # one with a duplicated Fulltext
                source = ArticleFixtureFactory.make_article_source(
                    eissn="5555-5555", pissn="5555-5555", doi="10.123/abc/5",
                    fulltext="http://example.com/1")
                del source["bibjson"]["journal"]
                article = Article(**source)
                article.set_id()
                articles.append(article)
                if add_journal_info:
                    journal_specs.append({"title": "5", "pissn": "5555-5555", "eissn": "5555-5555"})

    # mock legitimate-ownership checks per the account scenario
    ilo_mock = None
    if account_arg == "owner":
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=True)
    elif account_arg == "own_1":
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit_on_issn=[last_issn])
    else:
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner()
    self.svc.is_legitimate_owner = ilo_mock

    # mock duplicate detection: 1 = one known duplicate, 2 = merge-time duplicate,
    # otherwise no duplicates in the index
    gd_mock = None
    if duplicate_in_index == 1:
        gd_mock = BLLArticleMockFactory.get_duplicate(
            given_article_id=last_id, eissn=last_issn, pissn=last_issn,
            doi=last_doi, fulltext=last_ft)
    elif duplicate_in_index == 2:
        gd_mock = BLLArticleMockFactory.get_duplicate(merge_duplicate=True)
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    ios_mock = BLLArticleMockFactory.issn_ownership_status([], [], [], [])
    self.svc.issn_ownership_status = ios_mock

    self.svc._doi_or_fulltext_updated = BLLArticleMockFactory.doi_or_fulltext_updated(False, False)

    if add_journal_info:
        gj_mock = ModelArticleMockFactory.get_journal(journal_specs)
        Article.get_journal = gj_mock

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            try:
                self.svc.batch_create_articles(articles, account, duplicate_check,
                                               merge_duplicate, limit_to_account,
                                               add_journal_info)
            except exceptions.IngestException as e:
                # the exception carries the partial report — verify before re-raising
                if duplicate_in_index != 2:
                    report = e.result
                    assert report["success"] == success
                    assert report["fail"] == fail
                    assert report["update"] == update
                    assert report["new"] == success - update
                raise
    else:
        report = self.svc.batch_create_articles(articles, account, duplicate_check,
                                                merge_duplicate, limit_to_account,
                                                add_journal_info)

        # make sure all the articles are saved before running the asserts
        aids = [(a.id, a.last_updated) for a in articles]
        for aid, lu in aids:
            Article.block(aid, lu, sleep=0.05)

        assert report["success"] == success
        assert report["fail"] == fail
        assert report["update"] == update
        assert report["new"] == success - update

        if success > 0:
            all_articles = Article.all()
            if len(all_articles) != success:
                # allow the index a moment more to catch up
                time.sleep(0.5)
                all_articles = Article.all()
            assert len(all_articles) == success
            for article in all_articles:
                if add_journal_info:
                    assert article.bibjson().journal_title is not None
                else:
                    assert article.bibjson().journal_title is None
        else:
            # there's nothing in the article index
            with self.assertRaises(ESMappingMissingError):
                Article.all()
def test_01_create_article(self, value, kwargs):
    """Matrix test for ArticleService.create_article (update-by-id variant).

    kwargs control the presence of the article, the account and its
    ownership/role, what the duplicate lookup returns, and whether an
    update against an existing article changes its DOI/fulltext.
    Ownership, duplicate detection, permissions and the _prepare_update_*
    helpers are all mocked.
    """
    article_arg = kwargs.get("article")
    account_arg = kwargs.get("account")
    get_duplicate_result_arg = kwargs.get("get_duplicate_result")
    role_arg = kwargs.get("role")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    update_article_id_arg = kwargs.get("update_article_id")
    has_ft_doi_changed_arg = kwargs.get("has_ft_doi_changed_arg")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)
    has_ft_doi_changed = True if has_ft_doi_changed_arg == "yes" else False

    # tri-state flags: None when "none", else the parsed boolean
    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    raises = EXCEPTIONS.get(raises_arg)

    # the canonical identifiers for the incoming article ...
    eissn = "1234-5678"
    pissn = "9876-5432"
    doi = "10.123/abc/1"
    fulltext = "http://example.com/1"
    # ... and those used by deliberately-different duplicates
    another_doi = "10.123/duplicate-1"
    another_eissn = "1111-1111"
    another_pissn = "2222-2222"

    duplicate_id = None
    original_id = None
    update_article_id = None

    if add_journal_info:
        # a journal the article can resolve its journal metadata from
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    if get_duplicate_result_arg == 'different':
        # a pre-existing, different article sharing the DOI/fulltext
        source = ArticleFixtureFactory.make_article_source(
            eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext)
        del source["bibjson"]["journal"]
        duplicate = Article(**source)
        duplicate.save()
        duplicate_id = duplicate.id

    article_id_to_upload = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(
            eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        article_id_to_upload = article.id

        if get_duplicate_result_arg == "itself":
            # the "duplicate" shares the incoming article's own id
            source = ArticleFixtureFactory.make_article_source(
                eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext)
            del source["bibjson"]["journal"]
            duplicate = Article(**source)
            duplicate.set_id(article_id_to_upload)
            duplicate.save()
            duplicate_id = duplicate.id

        if update_article_id_arg != "none":
            # an already-saved original that the incoming article updates
            another_source = ArticleFixtureFactory.make_article_source(
                eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
            original = Article(**another_source)
            original.save(blocking=True)
            original_id = original.id

            if update_article_id_arg == "doi_ft_not_changed":
                article.bibjson().title = "This needs to be updated"
            elif update_article_id_arg == "doi_ft_changed_duplicate":
                article.bibjson().remove_identifiers("doi")
                article.bibjson().add_identifier("doi", another_doi)
            elif update_article_id_arg == "doi_ft_changed_ok":
                article.bibjson().remove_identifiers("doi")
                article.bibjson().add_identifier("doi", "10.1234/updated")
        else:
            update_article_id = None

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)
        legit = True if account_arg == "owner" else False
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
        self.svc.is_legitimate_owner = ilo_mock

        owned = [eissn, pissn] if account_arg == "owner" else []
        shared = []
        unowned = [eissn] if account_arg == "not_owner" else []
        unmatched = [pissn] if account_arg == "not_owner" else []
        ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched)
        self.svc.issn_ownership_status = ios_mock

    if role_arg == "admin":
        account.set_role("admin")
        account.save()

    # mock the duplicate lookup per the scenario
    if get_duplicate_result_arg == "none":
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    elif get_duplicate_result_arg == "itself":
        gd_mock = BLLArticleMockFactory.get_duplicate(
            eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext,
            given_article_id=original_id)
    elif get_duplicate_result_arg == "different":
        gd_mock = BLLArticleMockFactory.get_duplicate(
            eissn=another_eissn, pissn=another_pissn, doi=doi, fulltext=fulltext,
            given_article_id=duplicate_id)
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(given_article_id="exception")
    self.svc.get_duplicate = gd_mock
    # what the mocked lookup will hand the service, kept for the asserts below
    mock_article = self.svc.get_duplicate(article)

    if role_arg == "admin" or (role_arg == "publisher" and account_arg == "owner"):
        has_permissions_mock = BLLArticleMockFactory.has_permissions(True)
    else:
        has_permissions_mock = BLLArticleMockFactory.has_permissions(False)
    self.svc.has_permissions = has_permissions_mock

    prepare_update_admin_mock = BLLArticleMockFactory._prepare_update_admin(
        get_duplicate_result_arg, update_article_id_arg)
    self.svc._prepare_update_admin = prepare_update_admin_mock

    prepare_update_publisher_mock = BLLArticleMockFactory._prepare_update_publisher(
        get_duplicate_result_arg, has_ft_doi_changed)
    self.svc._prepare_update_publisher = prepare_update_publisher_mock

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, merge_duplicate=merge_duplicate,
                                    add_journal_info=add_journal_info, dry_run=dry_run,
                                    update_article_id=original_id)
    else:
        report = self.svc.create_article(article, account, merge_duplicate=merge_duplicate,
                                         add_journal_info=add_journal_info, dry_run=dry_run,
                                         update_article_id=original_id)

        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes" and update_article_id is not None:
            if get_duplicate_result_arg == "itself":
                original = Article.pull(update_article_id)
                assert original is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
        elif original_saved_arg == "yes":
            if get_duplicate_result_arg == "itself":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 1, "update: {}".format(report["update"])
                assert report["new"] == 0, "update: {}".format(report["new"])
            elif get_duplicate_result_arg == "none":
                new = Article.pull(article_id_to_upload)
                assert new is not None
                assert report["update"] == 0, "update: {}".format(report["update"])
                assert report["new"] == 1, "update: {}".format(report["new"])

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None and mock_article.id != original_id:
            merged = Article.pull(mock_article.id)
            assert merged is None, "merged: {}".format(merged)

        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"

        if update_article_id_arg == "doi_ft_changed_ok":
            original = Article.pull(original_id)
            assert original is not None
        elif update_article_id_arg == "doi_ft_not_changed":
            original = Article.pull(original_id)
            assert original is not None
def test_01_reject_application(self, name, application, application_status, account, prov, current_journal, note, save, raises=None):
    """Matrix test for ApplicationService.reject_application.

    Builds an application (optionally linked to a current journal), an
    account, provenance and note flags; asserts either the expected
    exception, or that the application is rejected, journal links are
    severed, provenance is written and notes are recorded.
    """
    #######################################
    ## set up

    if save == "fail":
        # force Suggestion.save to fail for the error path
        Suggestion.save = mock_save_fail

    ap = None
    journal = None
    if application == "exists":
        ap = Suggestion(**ApplicationFixtureFactory.make_application_source())
        ap.set_application_status(application_status)
        ap.set_id(ap.makeid())
        ap.remove_notes()

        if current_journal == "yes":
            # link application and journal in both directions
            journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
            journal.set_id(journal.makeid())
            journal.set_current_application(ap.id)
            journal.save(blocking=True)
            ap.set_current_journal(journal.id)
        else:
            ap.remove_current_journal()

    acc = None
    if account == "publisher":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account == "admin":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    provenance = None
    if prov != "none":
        provenance = prov == "true"

    thenote = None
    if note == "yes":
        thenote = "abcdefg"

    ########################################
    ## execute

    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.reject_application(ap, acc, provenance, note=thenote)
    else:
        svc.reject_application(ap, acc, provenance, note=thenote)
        # let the index catch up before checking
        time.sleep(1)

        #######################################
        ## Check

        ap2 = Suggestion.pull(ap.id)
        assert ap2 is not None
        assert ap2.application_status == constants.APPLICATION_STATUS_REJECTED
        assert ap2.current_journal is None

        # check the updated and manually updated date are essentially the same
        # (they can theoretically differ by a small amount just based on when they are set)
        updated_spread = abs((ap2.last_updated_timestamp - ap2.last_manual_update_timestamp).total_seconds())
        assert updated_spread <= 1.0

        if current_journal == "yes" and journal is not None:
            # the journal's application link is gone, and the application now
            # records the journal as related
            j2 = Journal.pull(journal.id)
            assert j2 is not None
            assert j2.current_application is None
            assert ap2.related_journal == j2.id

        if prov == "true":
            pr = Provenance.get_latest_by_resource_id(ap.id)
            assert pr is not None

        if note == "yes":
            assert len(ap2.notes) == 1
            assert ap2.notes[0].get("note") == "abcdefg"
        elif note == "no":
            assert len(ap2.notes) == 0
def test_01_create_article(self, name, kwargs):
    """Matrix test for ArticleService.create_article (variant without journal metadata).

    All drive parameters arrive in ``kwargs`` as strings ("true"/"false"/"none"/...)
    and are decoded into the tri-state values (True/False/None) that the service
    accepts. Ownership and duplicate detection are mocked out.
    """
    article_arg = kwargs.get("article")
    article_duplicate_arg = kwargs.get("article_duplicate")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    dry_run_arg = kwargs.get("dry_run")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)

    # decode the tri-state flags: "none" -> None, otherwise a real boolean
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    article = None
    original_id = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432", doi="10.123/abc/1", fulltext="http://example.com/1")
        article = Article(**source)
        article.set_id()
        # remember the id so we can check later whether the original was saved or discarded
        original_id = article.id

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)
        # mock ownership checks to match whether this account "owns" the article's ISSNs
        legit = True if account_arg == "owner" else False
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
        self.svc.is_legitimate_owner = ilo_mock

        owned = ["1234-5678", "9876-5432"] if account_arg == "owner" else []
        shared = []
        unowned = ["1234-5678"] if account_arg == "not_owner" else []
        unmatched = ["9876-5432"] if account_arg == "not_owner" else []
        ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched)
        self.svc.issn_ownership_status = ios_mock

    # mock duplicate detection: either a canned duplicate or None
    gd_mock = None
    if article_duplicate_arg == "yes":
        gd_mock = BLLArticleMockFactory.get_duplicate(eissn="1234-5678", pissn="9876-5432", doi="10.123/abc/1", fulltext="http://example.com/1")
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    # capture what the mock will report as the duplicate, for the save checks below
    mock_article = self.svc.get_duplicate(article)

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, dry_run)
    else:
        report = self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, dry_run)
        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes":
            original = Article.pull(original_id)
            assert original is not None
            assert report["update"] == 0
        elif article is not None:
            original = Article.pull(original_id)
            assert original is None

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None:
            merged = Article.pull(mock_article.id)
            assert merged is None
def test_01_create_article(self, name, kwargs):
    """Matrix test for ArticleService.create_article (variant with ``add_journal_info``).

    Same shape as the other create_article test, but additionally seeds a
    journal whose title should be copied onto the article when
    ``add_journal_info`` is enabled.
    """
    article_arg = kwargs.get("article")
    article_duplicate_arg = kwargs.get("article_duplicate")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    add_journal_info_arg = kwargs.get("add_journal_info")
    dry_run_arg = kwargs.get("dry_run")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    original_saved_arg = kwargs.get("original_saved")
    merge_saved_arg = kwargs.get("merge_saved")

    ###############################################
    ## set up

    success = int(success_arg)

    # decode the tri-state flags: "none" -> None, otherwise a real boolean
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    add_journal_info = None
    if add_journal_info_arg != "none":
        add_journal_info = True if add_journal_info_arg == "true" else False

    dry_run = None
    if dry_run_arg != "none":
        dry_run = True if dry_run_arg == "true" else False

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    eissn = "1234-5678"
    pissn = "9876-5432"

    if add_journal_info:
        # seed a journal carrying exactly our ISSNs, so the service can look up its title
        jsource = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**jsource)
        bj = j.bibjson()
        bj.title = "Add Journal Info Title"
        bj.remove_identifiers()
        bj.add_identifier(bj.P_ISSN, pissn)
        bj.add_identifier(bj.E_ISSN, eissn)
        j.save(blocking=True)

    article = None
    original_id = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1")
        # strip the fixture's journal metadata so any journal info present later
        # must have come from the add_journal_info machinery
        del source["bibjson"]["journal"]
        article = Article(**source)
        article.set_id()
        original_id = article.id

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)
        # mock ownership checks to match whether this account "owns" the article's ISSNs
        legit = True if account_arg == "owner" else False
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=legit)
        self.svc.is_legitimate_owner = ilo_mock

        owned = [eissn, pissn] if account_arg == "owner" else []
        shared = []
        unowned = [eissn] if account_arg == "not_owner" else []
        unmatched = [pissn] if account_arg == "not_owner" else []
        ios_mock = BLLArticleMockFactory.issn_ownership_status(owned, shared, unowned, unmatched)
        self.svc.issn_ownership_status = ios_mock

    # mock duplicate detection: either a canned duplicate or None
    gd_mock = None
    if article_duplicate_arg == "yes":
        gd_mock = BLLArticleMockFactory.get_duplicate(eissn=eissn, pissn=pissn, doi="10.123/abc/1", fulltext="http://example.com/1")
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    # capture what the mock will report as the duplicate, for the save checks below
    mock_article = self.svc.get_duplicate(article)

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run)
    else:
        report = self.svc.create_article(article, account, duplicate_check, merge_duplicate, limit_to_account, add_journal_info, dry_run)
        assert report["success"] == success

        # check that the article was saved and if it was saved that it was suitably merged
        if original_saved_arg == "yes":
            original = Article.pull(original_id)
            assert original is not None
            assert report["update"] == 0
        elif article is not None:
            original = Article.pull(original_id)
            assert original is None

        if merge_saved_arg == "yes":
            merged = Article.pull(mock_article.id)
            assert merged is not None
            assert report["update"] == 1
        elif mock_article is not None:
            merged = Article.pull(mock_article.id)
            assert merged is None

        # the journal title we seeded must have been copied onto the article
        if add_journal_info:
            assert article.bibjson().journal_title == "Add Journal Info Title"
def test_01_get_duplicates(self, name, kwargs):
    """Matrix test for ArticleService.get_duplicates / get_duplicate.

    The underlying discover_duplicates call is mocked to return configurable
    counts of DOI matches, fulltext matches, and overlap between the two sets;
    the test checks deduplication, ordering, and merge-conflict behaviour.
    """
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    doi_duplicates_arg = kwargs.get("doi_duplicates")
    fulltext_duplicates_arg = kwargs.get("fulltext_duplicates")
    overlap_arg = kwargs.get("overlap")
    raises_arg = kwargs.get("raises")

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    # "-" means "unspecified"; the mock factory treats -1 accordingly
    doi_duplicates = -1
    if doi_duplicates_arg not in ["-"]:
        doi_duplicates = int(doi_duplicates_arg)

    fulltext_duplicates = -1
    if fulltext_duplicates_arg not in ["-"]:
        fulltext_duplicates = int(fulltext_duplicates_arg)

    overlap = -1
    if overlap_arg not in ["-"]:
        overlap = int(overlap_arg)

    # overlapping records are counted once, hence the subtraction
    expected_count = doi_duplicates + fulltext_duplicates - overlap

    ###############################################
    ## set up

    owner = None
    if owner_arg != "no":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # generate our incoming article
    article = None
    if article_arg == "yes":
        source = ArticleFixtureFactory.make_article_source()
        article = Article(**source)
        article.set_id()

    mock = BLLArticleMockFactory.discover_duplicates(doi_duplicates, fulltext_duplicates, overlap)
    self.svc.discover_duplicates = mock

    # determine if we expect a merge conflict (more than one distinct duplicate)
    dds = 0 if doi_duplicates < 0 else doi_duplicates
    fds = 0 if fulltext_duplicates < 0 else fulltext_duplicates
    ol = 0 if overlap < 0 else overlap
    expect_merge_conflict = dds + fds - ol > 1

    ###########################################################
    # Execution

    first_article = None

    # first do get_duplicates
    if raises is not None:
        with self.assertRaises(raises):
            self.svc.get_duplicates(article)
    else:
        duplicates = self.svc.get_duplicates(article)
        if len(duplicates) > 0:
            first_article = duplicates[0]

        # check that we have the number of results we expected
        assert len(duplicates) == expected_count

        # check that the articles are unique in the list
        article_ids = [a.id for a in duplicates]
        article_ids.sort()
        deduped = list(set(article_ids))
        deduped.sort()  # so it's comparable to the article_ids list, as the set() call destroys ordering
        assert article_ids == deduped  # i.e. that there were no duplicates

        # check that the articles are ordered by last_updated
        last_updateds = [datetime.strptime(a.last_updated, "%Y-%m-%dT%H:%M:%SZ") for a in duplicates]
        sorted_lu = sorted(last_updateds, reverse=True)
        assert sorted_lu == last_updateds  # i.e. they were already sorted

    # then the same again on the singular get_duplicate
    if raises is not None:
        with self.assertRaises(raises):
            self.svc.get_duplicate(article)
    elif expect_merge_conflict:
        # multiple distinct duplicates cannot be reduced to a single one
        with self.assertRaises(exceptions.ArticleMergeConflict):
            self.svc.get_duplicate(article)
    else:
        duplicate = self.svc.get_duplicate(article)
        if expected_count > 0:
            assert isinstance(duplicate, Article)
            # the singular duplicate must be the most recently updated one
            assert duplicate.id == first_article.id
        else:
            assert duplicate is None
def test_01_accept_application(self, name, application_type, account_type, manual_update, provenance, raises, result_provenance, result_manual_update):
    """Matrix test for ApplicationService.accept_application.

    Covers save failures, applications with/without a current journal, account
    authorisation, and the manual-update / provenance recording flags.
    """
    ###############################################
    ## set up

    # create the application
    application = None
    if application_type == "save_fail":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        # NOTE(review): patches save on the instance and on the Journal class to
        # simulate storage failure; presumably teardown restores Journal.save — confirm
        application.save = mock_save
        Journal.save = mock_save
    elif application_type == "with_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        # one unique note plus one that also exists on the journal, to exercise note merging
        application.remove_notes()
        application.add_note("unique 1", "2002-01-01T00:00:00Z")
        application.add_note("duplicate", "2001-01-01T00:00:00Z")

        cj = application.current_journal
        journal = Journal(**JournalFixtureFactory.make_journal_source())
        journal.set_id(cj)
        journal.remove_notes()
        journal.add_note("unique 2", "2003-01-01T00:00:00Z")
        journal.add_note("duplicate", "2001-01-01T00:00:00Z")
        journal.save(blocking=True)
    elif application_type == "no_current_journal":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.remove_current_journal()

    acc = None
    if account_type == "not_allowed":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
    elif account_type == "allowed":
        acc = Account(**AccountFixtureFactory.make_managing_editor_source())

    # decode tri-state flags: anything other than "true"/"false" leaves them None
    mu = None
    if manual_update in ["true", "false"]:
        mu = manual_update == "true"

    prov = None
    if provenance in ["true", "false"]:
        prov = provenance == "true"

    # NOTE(review): randomised per run, so the save/no-save paths are not
    # deterministically covered by a single execution
    save = bool(randint(0,1))

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.accept_application(application, acc, mu, prov)
    else:
        journal = svc.accept_application(application, acc, mu, prov, save_journal=save, save_application=save)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # check a few common things
        assert application.application_status == constants.APPLICATION_STATUS_ACCEPTED
        assert application.current_journal is None
        assert journal.current_application is None
        assert application.related_journal == journal.id

        # acceptance must add this application to the journal's related list
        related = journal.related_applications
        if application_type == "with_current_journal":
            assert len(related) == 3
        elif application_type == "no_current_journal":
            assert len(related) == 1
        assert related[0].get("application_id") == application.id
        assert related[0].get("date_accepted") is not None

        if result_manual_update == "yes":
            assert journal.last_manual_update is not None
            assert journal.last_manual_update != "1970-01-01T00:00:00Z"
            assert application.last_manual_update is not None
            assert application.last_manual_update != "1970-01-01T00:00:00Z"
        elif result_manual_update == "no":
            assert journal.last_manual_update is None
            assert application.last_manual_update is None

        # note merging: duplicates collapse, unique notes from both sides survive
        if application_type == "with_current_journal":
            assert len(journal.notes) == 3
            notevals = [note.get("note") for note in journal.notes]
            assert "duplicate" in notevals
            assert "unique 1" in notevals
            assert "unique 2" in notevals

        app_prov = Provenance.get_latest_by_resource_id(application.id)
        if result_provenance == "yes":
            assert app_prov is not None
        elif result_provenance == "no":
            assert app_prov is None

        # NOTE(review): dead placeholder — no assertions are made about the
        # save_journal/save_application flag
        if save:
            pass
def create_edit_cases(): application_source = ApplicationFixtureFactory.make_application_source() account_source = AccountFixtureFactory.make_publisher_source() editable_application = Suggestion(**application_source) editable_application.set_application_status( constants.APPLICATION_STATUS_UPDATE_REQUEST) non_editable_application = Suggestion(**application_source) non_editable_application.set_application_status( constants.APPLICATION_STATUS_READY) owner_account = Account(**deepcopy(account_source)) owner_account.set_id(editable_application.owner) non_owner_publisher = Account(**deepcopy(account_source)) non_publisher = Account(**deepcopy(account_source)) non_publisher.remove_role("publisher") admin = Account(**deepcopy(account_source)) admin.add_role("admin") return [ param("no_app_no_account", None, None, raises=exceptions.ArgumentException), param("no_app_with_account", None, owner_account, raises=exceptions.ArgumentException), param("app_no_account", editable_application, None, raises=exceptions.ArgumentException), param("editable_app_owning_account", editable_application, owner_account, expected=True), param("editable_app_nonowning_account", editable_application, non_owner_publisher, raises=exceptions.AuthoriseException), param("editable_app_non_publisher_account", editable_application, non_publisher, raises=exceptions.AuthoriseException), param("editable_app_admin_account", editable_application, admin, expected=True), param("non_editable_app_owning_account", non_editable_application, owner_account, raises=exceptions.AuthoriseException), param("non_editable_app_nonowning_account", non_editable_application, non_owner_publisher, raises=exceptions.AuthoriseException), param("non_editable_app_non_publisher_account", non_editable_application, non_publisher, raises=exceptions.AuthoriseException), param("non_editable_app_admin_account", non_editable_application, admin, expected=True) ]
def test_01_discover_duplicates(self, name, kwargs):
    """Matrix test for ArticleService.discover_duplicates.

    Seeds the index with articles derived from IDENTS (with configurable DOI and
    fulltext padding/prefix/scheme variants), builds an incoming article, and
    checks which seeded articles are reported as DOI / fulltext duplicates.

    Fix: the incoming article's eISSN was ``"1234=5678"`` (typo, ``=`` for ``-``),
    contradicting its own "one matching" comment; corrected to ``"1234-5678"``.
    Per the adjacent comment, ISSN matches are not relevant to the outcome.
    """
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save()

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                # fixture fulltexts are scheme-less ("//..."); add a scheme variant.
                # "invalid" deliberately leaves the seeded URL without a scheme.
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432", doi=the_doi, fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save(blocking=True)
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234-5678"  # one matching (was "1234=5678" — typo fixed)
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    # wait until all the seeded articles are queryable in the index
    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article)
    else:
        possible_articles = svc.discover_duplicates(article)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles
def test_01_issn_ownership_status(self, name, kwargs):
    """Matrix test for ArticleService.issn_ownership_status.

    Journal.find_by_issn is mocked to return configurable (eissn, pissn) pairs
    with configurable owners; the test checks that each of the article's ISSNs
    lands in exactly one of the owned / shared / unowned / unmatched buckets.
    """
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_eissn_arg = kwargs.get("article_eissn")
    article_pissn_arg = kwargs.get("article_pissn")
    seen_eissn_arg = kwargs.get("seen_eissn")
    seen_pissn_arg = kwargs.get("seen_pissn")
    journal_owner_arg = kwargs.get("journal_owner")
    raises_arg = kwargs.get("raises")

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # generate our incoming article
    article = None
    eissn = None
    pissn = None
    if article_arg == "exists":
        source = ArticleFixtureFactory.make_article_source()
        article = Article(**source)
        article.set_id()

        # strip the fixture ISSNs, then add back only those the matrix asks for
        article.bibjson().remove_identifiers("pissn")
        if article_pissn_arg == "yes":
            pissn = "1234-5678"
            article.bibjson().add_identifier("pissn", pissn)

        article.bibjson().remove_identifiers("eissn")
        if article_eissn_arg == "yes":
            eissn = "9876-5432"
            article.bibjson().add_identifier("eissn", eissn)

    # ISSN pairs the mocked Journal.find_by_issn will "know about"
    issns = []
    if eissn is not None and pissn is not None and seen_eissn_arg == "yes" and seen_pissn_arg == "yes":
        issns.append((eissn, pissn))
    if eissn is not None and seen_eissn_arg == "yes":
        issns.append((eissn, "4321-9876"))
        issns.append((eissn, None))
    if pissn is not None and seen_pissn_arg == "yes":
        issns.append(("6789-4321", pissn))
        issns.append((None, pissn))

    # owners the mocked journals will report: correct, wrong, absent, or a mix
    owners = []
    if journal_owner_arg == "none":
        owners = [None]
    elif journal_owner_arg == "correct" and owner_id is not None:
        owners = [owner_id]
    elif journal_owner_arg == "incorrect":
        owners = ["randomowner"]
    elif journal_owner_arg == "mix" and owner_id is not None:
        owners.append(owner_id)
        owners.append("randomowner")
        owners.append(None)

    mock = ModelJournalMockFactory.find_by_issn(issns, owners)
    Journal.find_by_issn = mock

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.issn_ownership_status(article, owner_id)
    else:
        owned, shared, unowned, unmatched = svc.issn_ownership_status(article, owner_id)

        # "owned": ISSN seen on a journal whose owner is this account
        owned_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["correct"]:
            assert eissn in owned
            owned_count += 1
        elif eissn is not None:
            assert eissn not in owned

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["correct"]:
            assert pissn in owned
            owned_count += 1
        elif pissn is not None:
            assert pissn not in owned

        assert len(owned) == owned_count

        # "shared": ISSN seen on journals with a mixture of owners
        shared_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["mix"]:
            assert eissn in shared
            shared_count += 1
        elif eissn is not None:
            assert eissn not in shared

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["mix"]:
            assert pissn in shared
            shared_count += 1
        elif pissn is not None:
            assert pissn not in shared

        assert len(shared) == shared_count

        # "unowned": ISSN seen, but on journals owned by someone else or nobody
        unowned_count = 0
        if seen_eissn_arg == "yes" and eissn is not None and journal_owner_arg in ["incorrect", "none"]:
            assert eissn in unowned
            unowned_count += 1
        elif eissn is not None:
            assert eissn not in unowned

        if seen_pissn_arg == "yes" and pissn is not None and journal_owner_arg in ["incorrect", "none"]:
            assert pissn in unowned
            unowned_count += 1
        elif pissn is not None:
            assert pissn not in unowned

        assert len(unowned) == unowned_count

        # "unmatched": ISSN not known to any journal at all
        unmatched_count = 0
        if seen_eissn_arg == "no" and eissn is not None:
            assert eissn in unmatched
            unmatched_count += 1
        elif eissn is not None:
            assert eissn not in unmatched

        if seen_pissn_arg == "no" and pissn is not None:
            assert pissn in unmatched
            unmatched_count += 1
        elif pissn is not None:
            assert pissn not in unmatched

        assert len(unmatched) == unmatched_count
def test_01_batch_create_article(self, name, kwargs):
    """Matrix test for ArticleService.batch_create_articles.

    Builds a batch of articles (optionally with intra-batch duplicates and/or a
    duplicate already in the index, both via mocks), runs the batch create, and
    checks the success/fail/update/new counts in the report — including when the
    whole batch is rolled back via IngestException.

    Fix: removed a large dead triple-quoted string (a commented-out alternative
    article-generation loop) that was a no-op expression statement in the setup.
    """
    articles_arg = kwargs.get("articles")
    duplicate_in_batch_arg = kwargs.get("duplicate_in_batch")
    duplicate_in_index_arg = kwargs.get("duplicate_in_index")
    account_arg = kwargs.get("account")
    duplicate_check_arg = kwargs.get("duplicate_check")
    merge_duplicate_arg = kwargs.get("merge_duplicate")
    limit_to_account_arg = kwargs.get("limit_to_account")
    raises_arg = kwargs.get("raises")
    success_arg = kwargs.get("success")
    fail_arg = kwargs.get("fail")
    update_arg = kwargs.get("update")

    ###############################################
    ## set up

    success = int(success_arg)
    fail = int(fail_arg)
    update = int(update_arg)

    duplicate_in_batch = duplicate_in_batch_arg == "yes"
    duplicate_in_index = duplicate_in_index_arg == "yes"

    # None when no exception is expected
    raises = EXCEPTIONS.get(raises_arg)

    # decode the tri-state flags: "none" -> None, otherwise a real boolean
    duplicate_check = None
    if duplicate_check_arg != "none":
        duplicate_check = True if duplicate_check_arg == "true" else False

    merge_duplicate = None
    if merge_duplicate_arg != "none":
        merge_duplicate = True if merge_duplicate_arg == "true" else False

    limit_to_account = None
    if limit_to_account_arg != "none":
        limit_to_account = True if limit_to_account_arg == "true" else False

    account = None
    if account_arg != "none":
        source = AccountFixtureFactory.make_publisher_source()
        account = Account(**source)

    # identifiers of the last "base" article, used to configure the mocks below
    last_doi = None
    last_ft = None
    last_issn = None
    last_id = None

    articles = None
    if articles_arg != "none":
        articles = []
        if articles_arg == "yes":
            # one with a DOI and no fulltext
            source = ArticleFixtureFactory.make_article_source(eissn="0000-0000", pissn="0000-0000", doi="10.123/abc/0", fulltext=False)
            article = Article(**source)
            article.set_id()
            articles.append(article)

            # another with a DOI and no fulltext
            source = ArticleFixtureFactory.make_article_source(eissn="1111-1111", pissn="1111-1111", doi="10.123/abc/1", fulltext=False)
            article = Article(**source)
            article.set_id()
            articles.append(article)

            # one with a fulltext and no DOI
            source = ArticleFixtureFactory.make_article_source(eissn="2222-2222", pissn="2222-2222", fulltext="http://example.com/2", doi=False)
            article = Article(**source)
            article.set_id()
            articles.append(article)

            # another one with a fulltext and no DOI
            source = ArticleFixtureFactory.make_article_source(eissn="3333-3333", pissn="3333-3333", fulltext="http://example.com/3", doi=False)
            article = Article(**source)
            article.set_id()
            articles.append(article)

            last_issn = "3333-3333"
            last_doi = "10.123/abc/1"
            last_ft = "http://example.com/3"
            last_id = articles[-1].id

            if duplicate_in_batch:
                # one with a duplicated DOI
                source = ArticleFixtureFactory.make_article_source(eissn="4444-4444", pissn="4444-4444", doi="10.123/abc/0", fulltext="http://example.com/4")
                article = Article(**source)
                article.set_id()
                articles.append(article)

                # one with a duplicated Fulltext
                source = ArticleFixtureFactory.make_article_source(eissn="5555-5555", pissn="5555-5555", doi="10.123/abc/5", fulltext="http://example.com/1")
                article = Article(**source)
                article.set_id()
                articles.append(article)

    # mock ownership according to the account scenario
    ilo_mock = None
    if account_arg == "owner":
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit=True)
    elif account_arg == "own_1":
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner(legit_on_issn=[last_issn])
    else:
        ilo_mock = BLLArticleMockFactory.is_legitimate_owner()
    self.svc.is_legitimate_owner = ilo_mock

    # mock in-index duplicate detection against the last base article's identifiers
    gd_mock = None
    if duplicate_in_index:
        gd_mock = BLLArticleMockFactory.get_duplicate(given_article_id=last_id, eissn=last_issn, pissn=last_issn, doi=last_doi, fulltext=last_ft)
    else:
        gd_mock = BLLArticleMockFactory.get_duplicate(return_none=True)
    self.svc.get_duplicate = gd_mock

    ios_mock = BLLArticleMockFactory.issn_ownership_status([], [], [], [])
    self.svc.issn_ownership_status = ios_mock

    ###########################################################
    # Execution

    if raises is not None:
        with self.assertRaises(raises):
            try:
                self.svc.batch_create_articles(articles, account, duplicate_check, merge_duplicate, limit_to_account)
            except exceptions.IngestException as e:
                # even on failure the exception carries the batch report — check it, then
                # re-raise so assertRaises still sees the exception
                report = e.result
                assert report["success"] == success
                assert report["fail"] == fail
                assert report["update"] == update
                assert report["new"] == success - update
                raise
    else:
        report = self.svc.batch_create_articles(articles, account, duplicate_check, merge_duplicate, limit_to_account)

        # make sure all the articles are saved before running the asserts
        aids = [(a.id, a.last_updated) for a in articles]
        for aid, lu in aids:
            Article.block(aid, lu, sleep=0.05)

        assert report["success"] == success
        assert report["fail"] == fail
        assert report["update"] == update
        assert report["new"] == success - update

        if success > 0:
            all_articles = Article.all()
            if len(all_articles) != success:
                # the index can lag slightly; retry once before asserting
                time.sleep(0.5)
                all_articles = Article.all()
            assert len(all_articles) == success
        else:
            # there's nothing in the article index
            with self.assertRaises(ESMappingMissingError):
                Article.all()
def test_01_update_request(self, name, journal_id, journal_lock, account, account_role, account_is_owner, current_applications, application_lock, application_status, completed_applications, raises, return_app, return_jlock, return_alock, db_jlock, db_alock, db_app):
    """Matrix test for ApplicationService.update_request_for_journal.

    Builds a journal/account/application fixture according to the string-valued
    parameters (supplied by the parameterised test runner), invokes the service,
    and checks the returned application/locks plus the persisted lock and
    application records against the expected outcomes.

    :param name: human-readable name of the parameterised test case (unused in logic)
    :param journal_id: "valid" | "not_in_doaj" | "missing" - which journal fixture to build
    :param journal_lock: "yes" to pre-lock the journal as another user
    :param account: "yes" to create an acting account
    :param account_role: "none" | "publisher" | "admin" - role configuration for the account
    :param account_is_owner: "yes" to make the account the journal's owner
    :param current_applications: number (as string) of current applications to attach
    :param application_lock: "yes" to pre-lock the latest application as another user
    :param application_status: status to set on the latest application, or "n/a"
    :param completed_applications: number (as string) of completed applications to attach
    :param raises: key into EXCEPTIONS naming the expected exception, or "" for none
    :param return_app: "yes" | "none" - expected application return value
    :param return_jlock: "yes" | "none" - expected journal lock return value
    :param return_alock: "yes" | "none" - expected application lock return value
    :param db_jlock: "yes" | "no" - whether a journal lock should exist in the index
    :param db_alock: "yes" | "no" - whether an application lock should exist in the index
    :param db_app: "yes" | "no" - whether the application should exist in the index
    """
    ###############################################
    ## set up

    # create the journal fixture (or just an id for the "missing" case)
    journal = None
    jid = None
    if journal_id == "valid":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "not_in_doaj":
        journal = Journal(**JournalFixtureFactory.make_journal_source(in_doaj=False))
        journal.remove_related_applications()
        journal.remove_current_application()
        jid = journal.id
    elif journal_id == "missing":
        # a plausible-looking id with no backing record
        jid = uuid.uuid4().hex

    # create the acting account with the requested role/ownership
    acc = None
    if account == "yes":
        acc = Account(**AccountFixtureFactory.make_publisher_source())
        if account_role == "none":
            acc.remove_role("publisher")
        elif account_role == "admin":
            acc.remove_role("publisher")
            acc.add_role("admin")
        acc.set_id(acc.makeid())
        if account_is_owner == "yes":
            # NOTE(review): assumes a journal fixture exists for this case -
            # journal is None when journal_id == "missing"
            acc.set_id(journal.owner)

    # optionally pre-lock the journal on behalf of a different user
    if journal_lock == "yes":
        lock.lock("journal", jid, "someoneelse", blocking=True)

    # attach the requested number of current applications to the journal
    latest_app = None
    current_app_count = int(current_applications)
    for i in range(current_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("198" + str(i) + "-01-01T00:00:00Z")
        app.set_current_journal(jid)
        app.save()
        latest_app = app
        if journal is not None:
            journal.set_current_application(app.id)

    # attach the requested number of completed (related) applications
    comp_app_count = int(completed_applications)
    for i in range(comp_app_count):
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.set_created("197" + str(i) + "-01-01T00:00:00Z")
        app.set_related_journal(jid)
        app.save()
        if journal is not None:
            journal.add_related_application(app.id, date_accepted=app.created_date)

    if current_app_count == 0 and comp_app_count == 0:
        # save at least one record to initialise the index mapping, otherwise tests fail
        app = Suggestion(**ApplicationFixtureFactory.make_application_source())
        app.set_id(app.makeid())
        app.save()

    # optionally pre-lock the latest application on behalf of a different user
    if application_lock == "yes":
        lock.lock("suggestion", latest_app.id, "someoneelse", blocking=True)

    if application_status != "n/a":
        latest_app.set_application_status(application_status)
        latest_app.save(blocking=True)

    # finally save the journal record, ensuring we get a blocking save, so everything
    # above here should be synchronised with the repo
    if journal is not None:
        journal.save(blocking=True)

    ###########################################################
    # Execution

    svc = DOAJ.applicationService()
    if raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.update_request_for_journal(jid, acc)
    else:
        application, jlock, alock = svc.update_request_for_journal(jid, acc)

        # we need to sleep, so the index catches up
        time.sleep(1)

        # returned application
        if return_app == "none":
            assert application is None
        elif return_app == "yes":
            assert application is not None

        # returned journal lock
        if return_jlock == "none":
            assert jlock is None
        elif return_jlock == "yes":
            assert jlock is not None

        # returned application lock
        if return_alock == "none":
            assert alock is None
        elif return_alock == "yes":
            assert alock is not None

        # journal lock state in the index
        # (fixed: original assigned lock.has_lock(...) to an unused local `l`
        # and then re-evaluated the identical call in the assert)
        if db_jlock == "no" and acc is not None:
            assert not lock.has_lock("journal", jid, acc.id)
        elif db_jlock == "yes" and acc is not None:
            assert lock.has_lock("journal", jid, acc.id)

        # application lock state in the index
        if db_alock == "no" and application.id is not None and acc is not None:
            assert not lock.has_lock("suggestion", application.id, acc.id)
        elif db_alock == "yes" and application.id is not None and acc is not None:
            assert lock.has_lock("suggestion", application.id, acc.id)

        # application record state in the index
        if db_app == "no" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is None
        elif db_app == "yes" and application.id is not None:
            indb = Suggestion.q2obj(q="id.exact:" + application.id)
            assert indb is not None

        # article metadata is only populated when there were prior applications
        if current_app_count == 0 and comp_app_count == 0 and application is not None:
            assert application.article_metadata is None
            assert application.articles_last_year is None
        elif application is not None:
            assert application.article_metadata is not None
            assert application.articles_last_year is not None