def test_has_permissions(self):
    journal_source = JournalFixtureFactory.make_journal_source()
    journal1 = Journal(**journal_source)

    publisher_owner_src = AccountFixtureFactory.make_publisher_source()
    publisher_owner = Account(**publisher_owner_src)
    publisher_stranged_src = AccountFixtureFactory.make_publisher_source()
    publisher_stranged = Account(**publisher_stranged_src)
    admin_src = AccountFixtureFactory.make_managing_editor_source()
    admin = Account(**admin_src)

    journal1.set_owner(publisher_owner)
    journal1.save(blocking=True)

    eissn = journal1.bibjson().get_one_identifier("eissn")
    pissn = journal1.bibjson().get_one_identifier("pissn")

    art_source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn)
    article = Article(**art_source)

    assert self.svc.has_permissions(publisher_stranged, article, False)
    assert self.svc.has_permissions(publisher_owner, article, True)
    assert self.svc.has_permissions(admin, article, True)

    failed_result = self.svc.has_permissions(publisher_stranged, article, True)
    assert failed_result == {
        'success': 0, 'fail': 1, 'update': 0, 'new': 0,
        'shared': [], 'unowned': [pissn, eissn], 'unmatched': []
    }, "received: {}".format(failed_result)
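# Usage sketch for the two result shapes exercised above: has_permissions returns a
# truthy value when the account may provide the article, and a report dict (including
# an "unowned" list of ISSNs) when ownership fails. The helper and its branching are
# assumptions drawn from the assertions, not part of the service API.
def can_ingest(svc, account, article):
    result = svc.has_permissions(account, article, True)
    if isinstance(result, dict) and result.get("fail"):
        return False, result.get("unowned", [])
    return bool(result), []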
def find_by_issn(cls, issns, owners):
    journals = []
    seen_issns = []
    for owner in owners:
        for eissn, pissn in issns:
            if eissn not in seen_issns and eissn is not None:
                seen_issns.append(eissn)
            if pissn not in seen_issns and pissn is not None:
                seen_issns.append(pissn)

            source = JournalFixtureFactory.make_journal_source(in_doaj=True)
            journal = Journal(**source)
            journal.set_owner(owner)
            journal.bibjson().remove_identifiers("eissn")
            journal.bibjson().remove_identifiers("pissn")
            if eissn is not None:
                journal.bibjson().add_identifier("eissn", eissn)
            if pissn is not None:
                journal.bibjson().add_identifier("pissn", pissn)
            journals.append(journal)

    @classmethod
    def mock(cls, issns, in_doaj=None, max=10):
        if not isinstance(issns, list):
            issns = [issns]
        for issn in issns:
            if issn in seen_issns:
                return journals
        return []

    return mock
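# Sketch of wiring the factory above into a test via monkeypatching, in the usual DOAJ
# mock style. The enclosing factory class name (ModelJournalMockFactory) and the import
# path are assumptions based on this snippet; the original attribute is restored in tearDown.
from portality.models import Journal

class TestWithMockedJournals(DoajTestCase):
    def setUp(self):
        super(TestWithMockedJournals, self).setUp()
        self._old_find_by_issn = Journal.find_by_issn
        # closure that "knows" about one journal owned by "pub1"
        Journal.find_by_issn = ModelJournalMockFactory.find_by_issn(
            [("1111-1111", "2222-2222")], ["pub1"])

    def tearDown(self):
        Journal.find_by_issn = self._old_find_by_issn
        super(TestWithMockedJournals, self).tearDown()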
def form2obj(form, existing_journal):
    journal = Journal()
    bibjson = journal.bibjson()

    # The if statements that wrap practically every field are there due to this
    # form being used to edit old journals which don't necessarily have most of
    # this info.
    # It also allows admins to delete the contents of any field if they wish,
    # by ticking the "Allow incomplete form" checkbox and deleting the contents
    # of that field. The if condition(s) will then *not* add the relevant field to the
    # new journal object being constructed.
    # add_url in the journal model has a safeguard against empty URL-s.

    if form.title.data:
        bibjson.title = form.title.data
    bibjson.add_url(form.url.data, urltype='homepage')
    if form.alternative_title.data:
        bibjson.alternative_title = form.alternative_title.data
    if form.pissn.data:
        bibjson.add_identifier(bibjson.P_ISSN, form.pissn.data)
    if form.eissn.data:
        bibjson.add_identifier(bibjson.E_ISSN, form.eissn.data)
    if form.publisher.data:
        bibjson.publisher = form.publisher.data
    if form.society_institution.data:
        bibjson.institution = form.society_institution.data
    if form.platform.data:
        bibjson.provider = form.platform.data
    if form.contact_name.data or form.contact_email.data:
        journal.add_contact(form.contact_name.data, form.contact_email.data)
    if form.country.data:
        bibjson.country = form.country.data

    if forms.interpret_special(form.processing_charges.data):
        bibjson.set_apc(form.processing_charges_currency.data,
                        form.processing_charges_amount.data)

    if forms.interpret_special(form.submission_charges.data):
        bibjson.set_submission_charges(form.submission_charges_currency.data,
                                       form.submission_charges_amount.data)

    if forms.interpret_special(form.waiver_policy.data):
        bibjson.add_url(form.waiver_policy_url.data, 'waiver_policy')

    # checkboxes
    if forms.interpret_special(form.digital_archiving_policy.data) or form.digital_archiving_policy_url.data:
        archiving_policies = forms.interpret_special(form.digital_archiving_policy.data)
        archiving_policies = forms.interpret_other(archiving_policies,
                                                   form.digital_archiving_policy_other.data,
                                                   store_other_label=True)
        archiving_policies = forms.interpret_other(archiving_policies,
                                                   form.digital_archiving_policy_library.data,
                                                   forms.digital_archiving_policy_specific_library_value,
                                                   store_other_label=True)
        bibjson.set_archiving_policy(archiving_policies, form.digital_archiving_policy_url.data)

    if form.crawl_permission.data and form.crawl_permission.data != 'None':
        bibjson.allows_fulltext_indexing = forms.interpret_special(form.crawl_permission.data)  # just binary

    # checkboxes
    article_ids = forms.interpret_special(form.article_identifiers.data)
    article_ids = forms.interpret_other(article_ids, form.article_identifiers_other.data)
    if article_ids:
        bibjson.persistent_identifier_scheme = article_ids

    if (form.download_statistics.data and form.download_statistics.data != 'None') or form.download_statistics_url.data:
        bibjson.set_article_statistics(form.download_statistics_url.data,
                                       forms.interpret_special(form.download_statistics.data))

    if form.first_fulltext_oa_year.data:
        bibjson.set_oa_start(year=form.first_fulltext_oa_year.data)

    # checkboxes
    fulltext_format = forms.interpret_other(form.fulltext_format.data, form.fulltext_format_other.data)
    if fulltext_format:
        bibjson.format = fulltext_format

    if form.keywords.data:
        bibjson.set_keywords(form.keywords.data)  # tag list field

    if form.languages.data:
        bibjson.set_language(form.languages.data)  # select multiple field - gives a list back

    bibjson.add_url(form.editorial_board_url.data, urltype='editorial_board')

    if form.review_process.data or form.review_process_url.data:
        bibjson.set_editorial_review(form.review_process.data, form.review_process_url.data)

    bibjson.add_url(form.aims_scope_url.data, urltype='aims_scope')
    bibjson.add_url(form.instructions_authors_url.data, urltype='author_instructions')

    if (form.plagiarism_screening.data and form.plagiarism_screening.data != 'None') or form.plagiarism_screening_url.data:
        bibjson.set_plagiarism_detection(
            form.plagiarism_screening_url.data,
            has_detection=forms.interpret_special(form.plagiarism_screening.data)
        )

    if form.publication_time.data:
        bibjson.publication_time = form.publication_time.data

    bibjson.add_url(form.oa_statement_url.data, urltype='oa_statement')

    license_type = forms.interpret_other(form.license.data, form.license_other.data)
    if forms.interpret_special(license_type):
        # "None" and "False" as strings (like they come out of the WTForms processing)
        # would get interpreted correctly by this check, so "None" licenses should not appear
        if license_type in licenses:
            by = licenses[license_type]['BY']
            nc = licenses[license_type]['NC']
            nd = licenses[license_type]['ND']
            sa = licenses[license_type]['SA']
            license_title = licenses[license_type]['title']
        elif form.license_checkbox.data:
            by = True if 'BY' in form.license_checkbox.data else False
            nc = True if 'NC' in form.license_checkbox.data else False
            nd = True if 'ND' in form.license_checkbox.data else False
            sa = True if 'SA' in form.license_checkbox.data else False
            license_title = license_type
        else:
            by = None
            nc = None
            nd = None
            sa = None
            license_title = license_type

        bibjson.set_license(
            license_title,
            license_type,
            url=form.license_url.data,
            open_access=forms.interpret_special(form.open_access.data),
            by=by, nc=nc, nd=nd, sa=sa,
            embedded=forms.interpret_special(form.license_embedded.data),
            embedded_example_url=form.license_embedded_url.data
        )

    # checkboxes
    deposit_policies = forms.interpret_special(form.deposit_policy.data)  # need empty list if it's just "None"
    deposit_policies = forms.interpret_other(deposit_policies, form.deposit_policy_other.data)
    if deposit_policies:
        bibjson.deposit_policy = deposit_policies

    if form.copyright.data and form.copyright.data != 'None':
        holds_copyright = forms.interpret_other(
            forms.interpret_special(form.copyright.data),
            form.copyright_other.data
        )
        bibjson.set_author_copyright(form.copyright_url.data, holds_copyright=holds_copyright)

    if form.publishing_rights.data and form.publishing_rights.data != 'None':
        publishing_rights = forms.interpret_other(
            forms.interpret_special(form.publishing_rights.data),
            form.publishing_rights_other.data
        )
        bibjson.set_author_publishing_rights(form.publishing_rights_url.data, holds_rights=publishing_rights)

    # need to copy over the notes from the existing journal object, if any, otherwise
    # the dates on all the notes will get reset to right now (i.e. last_updated),
    # since the journal object we're creating in this xwalk is a new, empty one
    journal.set_notes(existing_journal.notes())

    # generate index of notes, just the text
    curnotes = []
    for curnote in journal.notes():
        curnotes.append(curnote['note'])

    # add any new notes
    formnotes = []
    for formnote in form.notes.data:
        if formnote['note']:
            if formnote['note'] not in curnotes and formnote["note"] != "":
                journal.add_note(formnote['note'])
            # also generate another text index of notes, this time an index of the form notes
            formnotes.append(formnote['note'])

    if current_user.has_role("delete_note"):
        # delete all notes not coming back from the form, means they've been deleted
        # also if one of the saved notes is completely blank, delete it
        for curnote in journal.notes()[:]:
            if not curnote['note'] or curnote['note'] not in formnotes:
                journal.remove_note(curnote)

    new_subjects = []
    for code in form.subject.data:
        sobj = {"scheme": 'LCC', "term": lcc.lookup_code(code), "code": code}
        new_subjects.append(sobj)
    bibjson.set_subjects(new_subjects)

    owner = form.owner.data.strip()
    if owner:
        journal.set_owner(owner)

    editor_group = form.editor_group.data.strip()
    if editor_group:
        journal.set_editor_group(editor_group)

    editor = form.editor.data.strip()
    if editor:
        journal.set_editor(editor)

    # old fields - only create them in the journal record if the values actually exist
    # need to use interpret_special in the test condition in case 'None' comes back from the form
    if getattr(form, 'author_pays', None):
        if forms.interpret_special(form.author_pays.data):
            bibjson.author_pays = form.author_pays.data
    if getattr(form, 'author_pays_url', None):
        if forms.interpret_special(form.author_pays_url.data):
            bibjson.author_pays_url = form.author_pays_url.data
    if getattr(form, 'oa_end_year', None):
        if forms.interpret_special(form.oa_end_year.data):
            bibjson.set_oa_end(form.oa_end_year.data)

    return journal
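# The note-handling block above is the subtle part of the xwalk: notes are copied from
# the existing journal so their dates survive, new form notes are appended, and users
# with the "delete_note" role can drop notes that did not come back from the form.
# A condensed, self-contained sketch of that merge, with plain dicts standing in for
# the model objects (an illustration, not the portality implementation):
def merge_notes(existing_notes, form_notes, can_delete):
    merged = list(existing_notes)                 # keep originals so their dates survive
    current_texts = [n["note"] for n in merged]
    form_texts = [n["note"] for n in form_notes if n["note"]]
    for text in form_texts:
        if text not in current_texts:
            merged.append({"note": text})         # date would be set when the journal is saved
    if can_delete:
        # drop blank notes and notes the form no longer contains
        merged = [n for n in merged if n["note"] and n["note"] in form_texts]
    return merged

# e.g. merge_notes([{"note": "old"}], [{"note": "old"}, {"note": "new"}], can_delete=False)
# -> [{"note": "old"}, {"note": "new"}]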
def test_01_discover_duplicates(self, name, kwargs):
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save()

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432",
                                                               doi=the_doi, fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save(blocking=True)
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234-5678"  # one matching
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article)
    else:
        possible_articles = svc.discover_duplicates(article)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles
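# Based on the assertions above, discover_duplicates returns either None or a dict keyed
# by match type ("doi", "fulltext") whose values are lists of candidate Article objects.
# A small consumer sketch under that assumption; note that the later variant of this test
# passes an owner id as a second argument, while this one does not. The import path is
# taken from the DOAJ layout used by these tests and should be treated as an assumption.
from portality.bll import DOAJ

def find_first_duplicate(article):
    svc = DOAJ.articleService()
    possible = svc.discover_duplicates(article)
    if not possible:
        return None
    # prefer a DOI match over a fulltext-URL match when both are present
    for key in ("doi", "fulltext"):
        if possible.get(key):
            return possible[key][0]
    return None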
class TestCreateOrUpdateArticle(DoajTestCase):

    def setUp(self):
        super(TestCreateOrUpdateArticle, self).setUp()

        self.publisher = Account()
        self.publisher.add_role("publisher")
        self.publisher.save(blocking=True)

        self.admin = Account()
        self.admin.add_role("admin")
        self.admin.save(blocking=True)

        sources = JournalFixtureFactory.make_many_journal_sources(2, True)
        self.journal1 = Journal(**sources[0])
        self.journal1.set_owner(self.publisher.id)
        jbib1 = self.journal1.bibjson()
        jbib1.add_identifier(jbib1.P_ISSN, "1111-1111")
        jbib1.add_identifier(jbib1.E_ISSN, "2222-2222")
        self.journal1.save(blocking=True)

        self.publisher.add_journal(self.journal1)

        self.journal2 = Journal(**sources[1])
        jbib2 = self.journal2.bibjson()
        jbib2.add_identifier(jbib2.P_ISSN, "1234-5678")
        jbib2.add_identifier(jbib2.E_ISSN, "9876-5432")
        self.journal2.save(blocking=True)

        self.article10 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1111-1111", eissn="2222-2222",
            doi="10.0000/article-10", fulltext="https://www.article10.com"))
        self.article10.set_id("articleid10")
        self.article10.save(blocking=True)

        self.article11 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1111-1111", eissn="2222-2222",
            doi="10.0000/article-11", fulltext="https://www.article11.com"))
        self.article11.set_id("articleid11")
        self.article11.save(blocking=True)

        self.article2 = Article(**ArticleFixtureFactory.make_article_source(
            pissn="1234-5678", eissn="9876-5432",
            doi="10.0000/article-2", fulltext="https://www.article2.com"))
        self.article2.set_id("articleid2")
        self.article2.save(blocking=True)

    def tearDown(self):
        super(TestCreateOrUpdateArticle, self).tearDown()

    def test_00_no_doi_and_url_changed(self):
        ba = self.article10.bibjson()
        ba.title = "Updated Article"

        # try for admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.bibjson().title == "Updated Article", \
            "Expected `Updated Article`, received: {}".format(self.article10.bibjson().title)

        ba.title = "Updated 2nd time"

        # try for publisher
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.publisher,
                                             article=self.article10)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.bibjson().title == "Updated 2nd time", \
            "Expected `Updated 2nd time`, received: {}".format(self.article10.bibjson().title)

    def test_01_new_doi_new_url(self):
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)
        ba.remove_urls(ba.FULLTEXT)
        ba.add_identifier(ba.DOI, "10.0000/NEW")
        ba.add_url(ba.FULLTEXT, "https://www.UPDATED.com")

        # for publisher
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.publisher,
                                             article=self.article10)
        assert resp["success"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["update"] == 0, "expected 1 new, received: {}".format(resp)
        assert resp["new"] == 1, "expected 1 new, received: {}".format(resp)

        # for admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 new, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 new, received: {}".format(resp)

    def test_02_old_doi_existing_url_admin(self):
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)

        # check for url from other article owned by the same publisher
        ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

        # check for url from other article owned by someone else
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url(self.article2.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_03_existing_doi_old_url_admin(self):
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)

        # check for DOI from other article owned by the same publisher
        ba.add_identifier(ba.DOI, "10.0000/article-11")

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

        ba.remove_identifiers(ba.DOI)

        # check for DOI from other article owned by someone else
        ba.add_identifier(ba.DOI, "10.0000/article-2")

        # try as a publisher
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(ArticleMergeConflict):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_04_old_doi_new_url(self):
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url("https://updated.com", ba.FULLTEXT)

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 new, received: {}".format(resp)
        assert self.article10.get_normalised_fulltext() == "//updated.com", \
            "expected //updated.com, received: {}".format(self.article10.get_normalised_fulltext())

    def test_05_new_doi_old_url(self):
        ba = self.article10.bibjson()
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, "10.0000/article-UPDATED")

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        resp = ArticleService.create_article(self=ArticleService(),
                                             account=self.admin,
                                             article=self.article10,
                                             update_article_id=self.article10.id)
        assert resp["success"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["update"] == 1, "expected 1 updated, received: {}".format(resp)
        assert resp["new"] == 0, "expected 1 updated, received: {}".format(resp)
        assert self.article10.get_normalised_doi() == "10.0000/article-UPDATED", \
            "expected 10.0000/article-UPDATED, received: {}".format(self.article10.get_normalised_doi())

    def test_06_existing_doi_new_url(self):
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url("https://updated.com", ba.FULLTEXT)

        # check for doi from other article of the same publisher
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, self.article11.bibjson().get_one_identifier(ba.DOI))

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)

    def test_07_new_doi_existing_url(self):
        ba = self.article10.bibjson()
        ba.remove_urls(ba.FULLTEXT)
        ba.add_url(self.article11.bibjson().get_single_url(ba.FULLTEXT), ba.FULLTEXT)

        # check for doi from other article of the same publisher
        ba.remove_identifiers(ba.DOI)
        ba.add_identifier(ba.DOI, "10.0000/article-UPDATED")

        # try as publisher
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.publisher,
                                          article=self.article10)

        # try as an admin
        with self.assertRaises(DuplicateArticleException):
            ArticleService.create_article(self=ArticleService(),
                                          account=self.admin,
                                          article=self.article10,
                                          update_article_id=self.article10.id)
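# The assertions above repeat the same counter contract on the create_article report
# ({"success": .., "update": .., "new": .., "fail": ..}). A small helper in the same
# style could capture it; this is a readability sketch, not part of ArticleService.
def assert_report(resp, success=0, update=0, new=0):
    assert resp["success"] == success, "expected success={}, received: {}".format(success, resp)
    assert resp["update"] == update, "expected update={}, received: {}".format(update, resp)
    assert resp["new"] == new, "expected new={}, received: {}".format(new, resp)

# mirroring test_00 above:
# resp = ArticleService.create_article(self=ArticleService(), account=self.admin,
#                                      article=self.article10,
#                                      update_article_id=self.article10.id)
# assert_report(resp, success=1, update=1, new=0)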
def test_02_application_2_journal(self, name, application_type, manual_update_arg, app_key_properties,
                                  current_journal, raises):
    # set up for the test
    #########################################

    cj = None
    has_seal = bool(randint(0, 1))
    application = None
    if application_type == "present":
        application = Suggestion(**ApplicationFixtureFactory.make_application_source())
        application.set_id(application.makeid())
        application.remove_contacts()
        application.remove_editor_group()
        application.remove_editor()
        application.remove_owner()
        application.remove_current_journal()
        application.remove_notes()

        if app_key_properties == "yes":
            application.add_contact("Application", "*****@*****.**")
            application.set_editor_group("appeditorgroup")
            application.set_editor("appeditor")
            application.set_owner("appowner")
            application.set_seal(has_seal)
            application.add_note("Application Note")

        if current_journal == "present":
            journal = Journal(**JournalFixtureFactory.make_journal_source())
            journal.remove_contacts()
            journal.add_contact("Journal", "*****@*****.**")
            journal.set_editor_group("journaleditorgroup")
            journal.set_editor("journaleditor")
            journal.set_owner("journalowner")
            journal.remove_current_application()
            journal.remove_notes()
            journal.add_note("Journal Note")
            journal.save(blocking=True)
            application.set_current_journal(journal.id)
            cj = journal
        elif current_journal == "missing":
            application.set_current_journal("123456789987654321")

    manual_update = None
    if manual_update_arg == "true":
        manual_update = True
    elif manual_update_arg == "false":
        manual_update = False

    # execute the test
    ########################################

    svc = DOAJ.applicationService()
    if raises is not None and raises != "":
        with self.assertRaises(EXCEPTIONS[raises]):
            svc.application_2_journal(application, manual_update)
    else:
        journal = svc.application_2_journal(application, manual_update)

        # check the result
        ######################################

        assert journal is not None
        assert isinstance(journal, Journal)
        assert journal.is_in_doaj() is True

        jbj = journal.bibjson().data
        del jbj["active"]
        assert jbj == application.bibjson().data

        if current_journal == "present":
            assert len(journal.related_applications) == 3
        else:
            assert len(journal.related_applications) == 1
        related = journal.related_application_record(application.id)
        assert related is not None

        if manual_update_arg == "true":
            assert journal.last_manual_update is not None and journal.last_manual_update != "1970-01-01T00:00:00Z"

        if app_key_properties == "yes":
            contacts = journal.contacts()
            assert len(contacts) == 1
            assert contacts[0].get("name") == "Application"
            assert contacts[0].get("email") == "*****@*****.**"
            assert journal.editor_group == "appeditorgroup"
            assert journal.editor == "appeditor"
            assert journal.owner == "appowner"
            assert journal.has_seal() == has_seal
            if current_journal == "present":
                assert len(journal.notes) == 2
            else:
                assert len(journal.notes) == 1
        elif app_key_properties == "no":
            if current_journal == "present":
                contacts = journal.contacts()
                assert len(contacts) == 1
                assert contacts[0].get("name") == "Journal"
                assert contacts[0].get("email") == "*****@*****.**"
                assert journal.editor_group == "journaleditorgroup"
                assert journal.editor == "journaleditor"
                assert journal.owner == "journalowner"
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 2
            elif current_journal == "none" or current_journal == "missing":
                contacts = journal.contacts()
                assert len(contacts) == 0
                assert journal.editor_group is None
                assert journal.editor is None
                assert journal.owner is None
                assert journal.has_seal() == has_seal
                assert len(journal.notes) == 1

        if current_journal == "present":
            assert cj.id == journal.id
            assert cj.created_date == journal.created_date
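# Condensed usage sketch for the method under test: convert an accepted application to a
# journal and persist it. The import path and the save(blocking=True) call mirror the
# fixtures above; treat the wrapper itself as an assumption, not DOAJ's accept workflow.
from portality.bll import DOAJ

def accept_application(application, manual_update=True):
    svc = DOAJ.applicationService()
    journal = svc.application_2_journal(application, manual_update)
    journal.save(blocking=True)
    return journal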
def test_01_discover_duplicates(self, name, kwargs):
    article_arg = kwargs.get("article")
    owner_arg = kwargs.get("owner")
    article_doi_arg = kwargs.get("article_doi")
    doi_duplicate_arg = kwargs.get("doi_duplicate")
    article_fulltext_arg = kwargs.get("article_fulltext")
    fulltext_duplicate_arg = kwargs.get("fulltext_duplicate")
    articles_by_doi_arg = kwargs.get("articles_by_doi")
    articles_by_fulltext_arg = kwargs.get("articles_by_fulltext")
    raises_arg = kwargs.get("raises")

    raises = EXCEPTIONS.get(raises_arg)

    ###############################################
    ## set up

    owner = None
    if owner_arg != "none":
        owner = Account(**AccountFixtureFactory.make_publisher_source())

    owner_id = None
    if owner is not None:
        owner_id = owner.id

    # create a journal for the owner
    if owner_arg not in ["none"]:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        journal = Journal(**source)
        journal.set_owner(owner.id)
        journal.bibjson().remove_identifiers()
        journal.bibjson().add_identifier("eissn", "1234-5678")
        journal.bibjson().add_identifier("pissn", "9876-5432")
        journal.save(blocking=True)

    # determine what we need to load into the index
    article_ids = []
    aids_block = []
    if owner_arg not in ["none", "no_articles"]:
        for i, ident in enumerate(IDENTS):
            the_doi = ident["doi"]
            if doi_duplicate_arg == "padded":
                the_doi = " " + the_doi + " "
            elif doi_duplicate_arg == "prefixed":
                the_doi = "https://dx.doi.org/" + the_doi

            the_fulltext = ident["fulltext"]
            if article_fulltext_arg != "invalid":
                if fulltext_duplicate_arg == "padded":
                    the_fulltext = " http:" + the_fulltext
                elif fulltext_duplicate_arg == "http":
                    the_fulltext = "http:" + the_fulltext
                elif fulltext_duplicate_arg == "https":
                    the_fulltext = "https:" + the_fulltext
                else:
                    the_fulltext = "http:" + the_fulltext

            source = ArticleFixtureFactory.make_article_source(eissn="1234-5678", pissn="9876-5432",
                                                               doi=the_doi, fulltext=the_fulltext)
            article = Article(**source)
            article.set_id()
            article.save()
            article_ids.append(article.id)
            aids_block.append((article.id, article.last_updated))

    # generate our incoming article
    article = None
    doi = None
    fulltext = None
    if article_arg == "yes":
        eissn = "1234-5678"  # one matching
        pissn = "6789-1234"  # the other not - issn matches are not relevant to this test

        if article_doi_arg in ["yes", "padded"]:
            doi = "10.1234/abc/11"
            if doi_duplicate_arg in ["yes", "padded"]:
                doi = IDENTS[0]["doi"]
            if article_doi_arg == "padded":
                doi = " doi:" + doi + " "
        elif article_doi_arg in ["invalid"]:
            doi = IDENTS[-1]["doi"]

        if article_fulltext_arg in ["yes", "padded", "https"]:
            fulltext = "//example.com/11"
            if fulltext_duplicate_arg in ["yes", "padded", "https"]:
                fulltext = IDENTS[0]["fulltext"]
            if fulltext_duplicate_arg == "padded":
                fulltext = " http:" + fulltext + " "
            elif fulltext_duplicate_arg == "https":
                fulltext = "https:" + fulltext
            else:
                fulltext = "http:" + fulltext
        elif article_fulltext_arg == "invalid":
            fulltext = IDENTS[-1]["fulltext"]

        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, doi=doi, fulltext=fulltext)
        article = Article(**source)

        # we need to do this if doi or fulltext are none, because the factory will set a default if we don't
        # provide them
        if doi is None:
            article.bibjson().remove_identifiers("doi")
        if fulltext is None:
            article.bibjson().remove_urls("fulltext")

        article.set_id()

    Article.blockall(aids_block)

    ###########################################################
    # Execution

    svc = DOAJ.articleService()
    if raises is not None:
        with self.assertRaises(raises):
            svc.discover_duplicates(article, owner_id)
    else:
        possible_articles = svc.discover_duplicates(article, owner_id)

        if articles_by_doi_arg == "yes":
            assert "doi" in possible_articles
            assert len(possible_articles["doi"]) == 1
            # if this is the "invalid" doi, then we expect it to match the final article, otherwise match the first
            if article_doi_arg == "invalid":
                assert possible_articles["doi"][0].id == article_ids[-1]
            else:
                assert possible_articles["doi"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "doi" not in possible_articles

        if articles_by_fulltext_arg == "yes":
            assert "fulltext" in possible_articles
            assert len(possible_articles["fulltext"]) == 1
            # if this is the "invalid" fulltext url, then we expect it to match the final article, otherwise match the first
            if article_fulltext_arg == "invalid":
                assert possible_articles["fulltext"][0].id == article_ids[-1]
            else:
                assert possible_articles["fulltext"][0].id == article_ids[0]
        else:
            if possible_articles is not None:
                assert "fulltext" not in possible_articles