def test_03b_create_update_request_fail(self):
    """Creating an update request should fail when the target journal is missing,
    when finalise raises, or when formcontext validation rejects the data."""
    # update request target not found
    with self.assertRaises(Api404Error):
        data = ApplicationFixtureFactory.incoming_application()
        publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api404Error as e:
            raise

    # if a formcontext exception is raised on finalise
    publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())
    journal.set_owner(publisher.id)
    journal.save(blocking=True)
    # monkey-patch finalise to raise; restored below from self.old_finalise
    formcontext.FormContext.finalise = mock_finalise_exception
    with self.assertRaises(Api400Error):
        data = ApplicationFixtureFactory.incoming_application()
        data["admin"]["current_journal"] = journal.id
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api400Error as e:
            assert str(e) == "test exception"
            raise
    formcontext.FormContext.finalise = self.old_finalise

    # validation fails on the formcontext
    publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())
    journal.set_owner(publisher.id)
    journal.save(blocking=True)
    # bypass the incoming-model validation so the formcontext validation is what trips
    IncomingApplication.custom_validate = mock_custom_validate_always_pass
    with self.assertRaises(Api400Error):
        data = ApplicationFixtureFactory.incoming_application()
        # duff submission charges url should trip the validator
        data["bibjson"]["submission_charges_url"] = "not a url!"
        data["admin"]["current_journal"] = journal.id
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api400Error as e:
            raise
def test_01_journal2questions(self):
    """Crosswalk a journal fixture to question/answer pairs and compare against the expected answers."""
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.prep()
    q_and_a = Journal2QuestionXwalk.journal2question(journal)
    answers = [str(x[1]) for x in q_and_a]
    expected = JournalFixtureFactory.question_answers()
    for i in range(len(answers)):
        a = answers[i]
        if a != expected[i]:
            # print any mismatching pair before the final assert, for easier debugging
            print(("{c} = {a} | {b}".format(a=a, b=expected[i], c=q_and_a[i])))
    assert answers == expected
def test_03_withdraw(self):
    """Withdraw a batch of journals via change_in_doaj and check journals and their articles leave DOAJ."""
    acc = models.Account()
    acc.set_name("testuser")
    ctx = self._make_and_push_test_context(acc=acc)
    sources = JournalFixtureFactory.make_many_journal_sources(10, in_doaj=True)
    ids = []
    articles = []
    for source in sources:
        j = models.Journal(**source)
        j.save()
        ids.append(j.id)
        # give each journal one article sharing its ISSNs
        pissn = j.bibjson().get_identifiers(j.bibjson().P_ISSN)
        eissn = j.bibjson().get_identifiers(j.bibjson().E_ISSN)
        asource = ArticleFixtureFactory.make_article_source(pissn=pissn[0], eissn=eissn[0], with_id=False)
        a = models.Article(**asource)
        a.save()
        articles.append(a.id)
    time.sleep(2)  # let the index catch up

    change_in_doaj(ids, False)
    time.sleep(2)

    for id in ids:
        j = models.Journal.pull(id)
        assert j.is_in_doaj() is False
    for id in articles:
        a = models.Article.pull(id)
        assert a.is_in_doaj() is False
    ctx.pop()
def test_02_reinstate_task(self):
    """Reinstate out-of-DOAJ journals via the SetInDOAJ background task; journals and articles become in_doaj."""
    sources = JournalFixtureFactory.make_many_journal_sources(10, in_doaj=False)
    ids = []
    articles = []
    for source in sources:
        j = models.Journal(**source)
        j.save()
        ids.append(j.id)
        # give each journal one matching article, also out of DOAJ
        pissn = j.bibjson().get_identifiers(j.bibjson().P_ISSN)
        eissn = j.bibjson().get_identifiers(j.bibjson().E_ISSN)
        asource = ArticleFixtureFactory.make_article_source(pissn=pissn[0], eissn=eissn[0], with_id=False, in_doaj=False)
        a = models.Article(**asource)
        a.save()
        articles.append(a.id)
    time.sleep(2)  # let the index catch up

    job = SetInDOAJBackgroundTask.prepare("testuser", journal_ids=ids, in_doaj=True)
    SetInDOAJBackgroundTask.submit(job)
    time.sleep(2)

    for id in ids:
        j = models.Journal.pull(id)
        assert j.is_in_doaj() is True
    for id in articles:
        a = models.Article.pull(id)
        assert a.is_in_doaj() is True
def test_02_retrieve_public_journal_success(self):
    """A public (in DOAJ) journal is retrievable anonymously, by its owner, and by any other account."""
    # set up all the bits we need
    data = JournalFixtureFactory.make_journal_source(in_doaj=True, include_obsolete_fields=True)
    j = models.Journal(**data)
    j.save()
    time.sleep(2)

    a = JournalsCrudApi.retrieve(j.id, account=None)

    # check that we got back the object we expected
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id

    # it should also work if we're logged in with the owner or another user
    # owner first
    account = models.Account()
    account.set_id(j.owner)
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    a = JournalsCrudApi.retrieve(j.id, account)
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id

    # try with another account
    not_owner = models.Account()
    not_owner.set_id("asdklfjaioefwe")
    a = JournalsCrudApi.retrieve(j.id, not_owner)
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id
def test_03_create_articles_fail(self):
    """Bulk article create fails with 401 for a missing account and 400 for malformed data; nothing is indexed."""
    # if the account is dud
    with self.assertRaises(Api401Error):
        data = ArticleFixtureFactory.make_incoming_api_article()
        dataset = [data] * 10
        ids = ArticlesBulkApi.create(dataset, None)

    # check that the index is empty, as none of them should have been made
    all = [x for x in models.Article.iterall()]
    assert len(all) == 0

    # if the data is bust
    with self.assertRaises(Api400Error):
        account = models.Account()
        account.set_id("test")
        account.set_name("Tester")
        account.set_email("*****@*****.**")
        # add a journal to the account
        journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        journal.set_owner(account.id)
        journal.save()
        time.sleep(1)
        # one junk record in the middle should abort the whole batch
        dataset = dataset[:5] + [{"some" : {"junk" : "data"}}] + dataset[5:]
        ids = ArticlesBulkApi.create(dataset, account)

    # check that the index is empty, as none of them should have been made
    all = [x for x in models.Article.iterall()]
    assert len(all) == 0
def test_02_create_duplicate_articles(self):
    """Bulk-creating 10 identical articles raises Api400Error and leaves no articles behind."""
    # set up all the bits we need - 10 articles
    data = ArticleFixtureFactory.make_incoming_api_article()
    dataset = [data] * 10

    # create an account that we'll do the create as
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    # add a journal to the account
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(2)

    # call create on the object (which will save it to the index)
    with self.assertRaises(Api400Error):
        ids = ArticlesBulkApi.create(dataset, account)

    time.sleep(2)

    # no articles should have been written, so the mapping should not exist
    with self.assertRaises(ESMappingMissingError):
        all_articles = models.Article.all()
def test_02_oai_journals(self):
    """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ"""
    journal_sources = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True)
    j_public = models.Journal(**journal_sources[0])
    j_public.save(blocking=True)

    # the second journal is withdrawn from DOAJ and must not appear in the feed
    j_private = models.Journal(**journal_sources[1])
    j_private.set_in_doaj(False)
    j_private.save(blocking=True)

    with self.app_test.test_request_context():
        with self.app_test.test_client() as t_client:
            resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc'))
            assert resp.status_code == 200

            t = etree.fromstring(resp.data)
            records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns)

            # Check we only have one journal returned
            assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 1

            # Check we have the correct journal
            assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title
def get_journal(cls, specs):
    """Build Journal fixtures from *specs* and return a mock lookup function.

    Each spec may carry "title", "pissn" and "eissn"; the built Journal is stored
    back on the spec under "instance". The returned function matches a record's
    ISSNs against the built journals and returns the matching instance.
    """
    journals = []
    for spec in specs:
        source = JournalFixtureFactory.make_journal_source(in_doaj=True)
        j = Journal(**source)
        bj = j.bibjson()
        bj.title = spec.get("title", "Journal Title")
        bj.remove_identifiers()
        if "pissn" in spec:
            bj.add_identifier(bj.P_ISSN, spec.get("pissn"))
        if "eissn" in spec:
            bj.add_identifier(bj.E_ISSN, spec.get("eissn"))
        spec["instance"] = j
        journals.append(spec)

    def mock(self):
        # intended to stand in for an instance method, hence the explicit self
        bibjson = self.bibjson()

        # first, get the ISSNs associated with the record
        pissns = bibjson.get_identifiers(bibjson.P_ISSN)
        eissns = bibjson.get_identifiers(bibjson.E_ISSN)

        # NOTE(review): assumes every spec has both "pissn" and "eissn" keys —
        # a spec without them would raise KeyError here; confirm with callers.
        for j in journals:
            if j["pissn"] in pissns and j["eissn"] in eissns:
                return j["instance"]

    return mock
def test_has_permissions(self):
    """has_permissions allows the journal owner and admins to act on an article, but not a stranger."""
    journal_source = JournalFixtureFactory.make_journal_source()
    journal1 = Journal(**journal_source)

    publisher_owner_src = AccountFixtureFactory.make_publisher_source()
    publisher_owner = Account(**publisher_owner_src)
    publisher_stranged_src = AccountFixtureFactory.make_publisher_source()
    publisher_stranged = Account(**publisher_stranged_src)
    admin_src = AccountFixtureFactory.make_managing_editor_source()
    admin = Account(**admin_src)

    journal1.set_owner(publisher_owner)
    journal1.save(blocking=True)

    # build an article that shares the journal's ISSNs
    eissn = journal1.bibjson().get_one_identifier("eissn")
    pissn = journal1.bibjson().get_one_identifier("pissn")
    art_source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn)
    article = Article(**art_source)

    assert self.svc.has_permissions(publisher_stranged, article, False)
    assert self.svc.has_permissions(publisher_owner, article, True)
    assert self.svc.has_permissions(admin, article, True)

    # a stranger asking for ownership gets a failure report listing the unowned ISSNs
    failed_result = self.svc.has_permissions(publisher_stranged, article, True)
    assert failed_result == {'success': 0, 'fail': 1, 'update': 0, 'new': 0, 'shared': [],
                             'unowned': [pissn, eissn], 'unmatched': []}, "received: {}".format(failed_result)
def test_06_anonymise_admin_empty_notes(self):
    """_anonymise_admin replaces the contact name but preserves everything else when notes is empty."""
    journal_src = JournalFixtureFactory.make_journal_source()
    journal_src['admin'] = {
        'owner': 'testuser',
        'editor': 'testeditor',
        'contact': [{
            'email': '*****@*****.**',
            'name': 'Tester Tester'
        }],
        'notes': []
    }
    journal = models.Journal(**journal_src)

    # freeze time so any date-derived anonymisation is deterministic
    with freeze_time("2017-02-23"):
        ar = anon_export._anonymise_admin(journal)

    assert ar.data['admin'] == {
        'owner': 'testuser',
        'editor': 'testeditor',
        'contact': [{
            'email': '*****@*****.**',
            'name': 'Ryan Gallagher'
        }],
        'notes': []
    }, ar['admin']
def test_02_create_duplicate_articles(self):
    """Bulk-creating 10 identical articles raises Api400Error and leaves no articles behind."""
    # set up all the bits we need - 10 articles
    data = ArticleFixtureFactory.make_incoming_api_article()
    dataset = [data] * 10

    # create an account that we'll do the create as
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    # add a journal to the account
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(2)

    # call create on the object (which will save it to the index)
    with self.assertRaises(Api400Error):
        ids = ArticlesBulkApi.create(dataset, account)

    time.sleep(2)

    # no articles should have been written, so the mapping should not exist
    with self.assertRaises(ESMappingMissingError):
        all_articles = models.Article.all()
def test_03_create_articles_fail(self):
    """Bulk article create fails with 401 for a missing account and 400 for malformed data; nothing is indexed."""
    # if the account is dud
    with self.assertRaises(Api401Error):
        data = ArticleFixtureFactory.make_incoming_api_article()
        dataset = [data] * 10
        ids = ArticlesBulkApi.create(dataset, None)

    # check that the index is empty, as none of them should have been made
    all = [x for x in models.Article.iterall()]
    assert len(all) == 0

    # if the data is bust
    with self.assertRaises(Api400Error):
        account = models.Account()
        account.set_id("test")
        account.set_name("Tester")
        account.set_email("*****@*****.**")
        # add a journal to the account
        journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
        journal.set_owner(account.id)
        journal.save()
        time.sleep(1)
        # one junk record in the middle should abort the whole batch
        dataset = dataset[:5] + [{"some": {"junk": "data"}}] + dataset[5:]
        ids = ArticlesBulkApi.create(dataset, account)

    # check that the index is empty, as none of them should have been made
    all = [x for x in models.Article.iterall()]
    assert len(all) == 0
def test_07_retrieve_article_fail(self):
    """Retrieving a non-public article fails anonymously (401), for the wrong user (404), and for a bad id (404)."""
    # set up all the bits we need
    # add a journal to the account
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)

    # an article not in DOAJ is not publicly visible
    data = ArticleFixtureFactory.make_article_source()
    data['admin']['in_doaj'] = False
    ap = models.Article(**data)
    ap.save()
    time.sleep(1)

    # should fail when no user and in_doaj is False
    with self.assertRaises(Api401Error):
        a = ArticlesCrudApi.retrieve(ap.id, None)

    # wrong user
    account = models.Account()
    account.set_id("asdklfjaioefwe")
    with self.assertRaises(Api404Error):
        a = ArticlesCrudApi.retrieve(ap.id, account)

    # non-existant article
    account = models.Account()
    account.set_id(ap.id)
    with self.assertRaises(Api404Error):
        a = ArticlesCrudApi.retrieve("ijsidfawefwefw", account)
def test_01_withdraw_task(self):
    """Withdraw journals via the SetInDOAJ background task; journals and their articles leave DOAJ."""
    sources = JournalFixtureFactory.make_many_journal_sources(10, in_doaj=True)
    ids = []
    articles = []
    for source in sources:
        j = models.Journal(**source)
        j.save()
        ids.append(j.id)
        # give each journal one article sharing its ISSNs
        pissn = j.bibjson().get_identifiers(j.bibjson().P_ISSN)
        eissn = j.bibjson().get_identifiers(j.bibjson().E_ISSN)
        asource = ArticleFixtureFactory.make_article_source(pissn=pissn[0], eissn=eissn[0], with_id=False)
        a = models.Article(**asource)
        a.save()
        articles.append(a.id)
    time.sleep(2)  # let the index catch up

    job = SetInDOAJBackgroundTask.prepare("testuser", journal_ids=ids, in_doaj=False)
    SetInDOAJBackgroundTask.submit(job)
    time.sleep(2)

    for id in ids:
        j = models.Journal.pull(id)
        assert j.is_in_doaj() is False
    for id in articles:
        a = models.Article.pull(id)
        assert a.is_in_doaj() is False
def test_15_create_application_update_request_dryrun(self):
    """An update request created with dry_run=True must not be persisted to the application index."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")

    # the existing journal targeted by the update request
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object, with the dry_run flag set
    a = ApplicationsCrudApi.create(data, account, dry_run=True)

    time.sleep(2)

    # now check that the application index remains empty
    ss = [x for x in models.Suggestion.iterall()]
    assert len(ss) == 0
def test_15_create_application_update_request_dryrun(self):
    """An update request created with dry_run=True must not be persisted to the application index."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")

    # the existing journal targeted by the update request
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object, with the dry_run flag set
    a = ApplicationsCrudApi.create(data, account, dry_run=True)

    time.sleep(2)

    # now check that the application index remains empty
    ss = [x for x in models.Suggestion.iterall()]
    assert len(ss) == 0
def test_13_create_application_update_request_success(self):
    """A valid update request creates a Suggestion inheriting the journal's ISSNs, title and owner details."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")
    account.save(blocking=True)

    # the existing journal targeted by the update request
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object (which will save it to the index)
    a = ApplicationsCrudApi.create(data, account)

    # check that it got created with the right properties
    assert isinstance(a, models.Suggestion)
    assert a.id != "ignore_me"
    assert a.created_date != "2001-01-01T00:00:00Z"
    assert a.last_updated != "2001-01-01T00:00:00Z"
    assert a.suggester.get("name") == "Tester"  # The suggester should be the owner of the existing journal
    assert a.suggester.get("email") == "*****@*****.**"
    assert a.owner == "test"
    assert a.suggested_on is not None
    assert a.bibjson().issns() == ["9999-8888", "7777-6666"] or a.bibjson().issns() == ["7777-6666", "9999-8888"]
    assert a.bibjson().title == "not changed"

    # also, because it's a special case, check the archiving_policy
    archiving_policy = a.bibjson().archiving_policy
    assert len(archiving_policy.get("policy")) == 4
    lcount = 0
    scount = 0
    for ap in archiving_policy.get("policy"):
        if isinstance(ap, list):
            lcount += 1
            assert ap[0] in ["A national library", "Other"]
            assert ap[1] in ["Trinity", "A safe place"]
        else:
            scount += 1
    assert lcount == 2
    assert scount == 2
    assert "CLOCKSS" in archiving_policy.get("policy")
    assert "LOCKSS" in archiving_policy.get("policy")

    time.sleep(2)

    # the suggestion should be retrievable from the index
    s = models.Suggestion.pull(a.id)
    assert s is not None
def setUp(self):
    """Create journals with articles, record accounts forbidden from bulk delete, and push a managing-editor context."""
    super(TestTaskJournalBulkDelete, self).setUp()

    self.journals = []
    self.articles = []
    for j_src in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
        j = models.Journal(**j_src)
        self.journals.append(j)
        j.save()
        # each journal gets a fixed number of articles sharing its ISSNs
        for i in range(0, TEST_ARTICLES_PER_JOURNAL):
            a = models.Article(**ArticleFixtureFactory.make_article_source(
                with_id=False, eissn=j.bibjson().first_eissn, pissn=j.bibjson().first_pissn))
            a.save()
            self.articles.append(a)

    sleep(2)  # let the index catch up

    # roles which should not be permitted to run the bulk delete
    self.forbidden_accounts = [
        AccountFixtureFactory.make_editor_source()['id'],
        AccountFixtureFactory.make_assed1_source()['id'],
        AccountFixtureFactory.make_assed2_source()['id'],
        AccountFixtureFactory.make_assed3_source()['id']
    ]

    self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
def test_07_unmatched_editor(self):
    """Bulk assign an editor group to a bunch of journals using a background task"""
    new_eg = EditorGroupFixtureFactory.setup_editor_group_with_editors(group_name='editorgroup')

    # the journal's current editor is not a member of the new group
    source = JournalFixtureFactory.make_journal_source()
    source["admin"]["editor"] = "random_editor"
    journal = models.Journal(**source)
    journal.save(blocking=True)

    # run the bulk edit for real (dry_run=False) as a background job
    summary = journal_manage({"query": {"terms": {"_id": [journal.id]}}}, doaj_seal=True, dry_run=False)

    sleep(2)

    job = models.BackgroundJob.all()[0]
    assert job.status == "complete", json.dumps(job.data, indent=2)

    # the group is assigned, and the unmatched editor is cleared
    journal2 = models.Journal.pull(journal.id)
    assert journal2.editor_group == "editorgroup"
    assert journal2.editor is None
def test_11_delete_article_fail(self):
    """Deleting an article fails without an account (401), with the wrong account (404), or a bad id (404)."""
    # set up all the bits we need
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)
    data = ArticleFixtureFactory.make_article_source()

    # call create on the object (which will save it to the index)
    a = ArticlesCrudApi.create(data, account)

    # let the index catch up
    time.sleep(1)

    # call delete on the object in various context that will fail
    # without an account
    with self.assertRaises(Api401Error):
        ArticlesCrudApi.delete(a.id, None)

    # with the wrong account
    account.set_id("other")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.delete(a.id, account)

    # on the wrong id
    account.set_id("test")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.delete("adfasdfhwefwef", account)
def test_03_oai_resumption_token(self): """ Test the behaviour of the ResumptionToken in the OAI interface""" # Set the OAI interface to only return two identifiers at a time app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 2 [j0, j1, j2, j3, j4] = JournalFixtureFactory.make_many_journal_sources(5, in_doaj=True) # Save a single journal in the index jm0 = models.Journal(**j0) jm0.save(blocking=True) # ListIdentifiers - we expect no resumptionToken because all results are returned with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', metadataPrefix='oai_dc')) t = etree.fromstring(resp.data) assert t.xpath('//oai:identifier', namespaces=self.oai_ns)[0].text == 'oai:doaj.org/journal:journalid0' assert t.xpath('//oai:resumptionToken', namespaces=self.oai_ns) == [] # Populate index with 4 more journals for j in [j1, j2, j3, j4]: jm = models.Journal(**j) jm.save(blocking=True) # ListIdentifiers - we expect 5 total results and a resumptionToken to fetch the rest with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', metadataPrefix='oai_dc')) t = etree.fromstring(resp.data) #print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '5' assert rt.get('cursor') == '2' # Get the next result resp2 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', resumptionToken=rt.text)) t = etree.fromstring(resp2.data) #print etree.tostring(t, pretty_print=True) rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt2.get('completeListSize') == '5' assert rt2.get('cursor') == '4' # And the final result - check we get an empty resumptionToken resp3 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', resumptionToken=rt2.text)) t = etree.fromstring(resp3.data) 
#print etree.tostring(t, pretty_print=True) rt3 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt3.get('completeListSize') == '5' assert rt3.get('cursor') == '5' assert rt3.text is None # We should get an error if we request again with an empty resumptionToken resp4 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers') + '&resumptionToken={0}'.format(rt3.text)) assert resp4.status_code == 200 # fixme: should this be a real error code? t = etree.fromstring(resp4.data) #print etree.tostring(t, pretty_print=True) err = t.xpath('//oai:error', namespaces=self.oai_ns)[0] assert 'the resumptionToken argument is invalid or expired' in err.text
def test_10_delete_article_success(self):
    """The article owner can delete their article, after which it is no longer retrievable."""
    # set up all the bits we need
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)
    data = ArticleFixtureFactory.make_article_source()

    # call create on the object (which will save it to the index)
    a = ArticlesCrudApi.create(data, account)

    # let the index catch up
    time.sleep(1)

    # now delete it
    ArticlesCrudApi.delete(a.id, account)

    # let the index catch up
    time.sleep(1)

    ap = models.Article.pull(a.id)
    assert ap is None
def test_07_retrieve_article_fail(self):
    """Retrieving a non-public article fails anonymously (401), for the wrong user (404), and for a bad id (404)."""
    # set up all the bits we need
    # add a journal to the account
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)

    # an article not in DOAJ is not publicly visible
    data = ArticleFixtureFactory.make_article_source()
    data['admin']['in_doaj'] = False
    ap = models.Article(**data)
    ap.save()
    time.sleep(1)

    # should fail when no user and in_doaj is False
    with self.assertRaises(Api401Error):
        a = ArticlesCrudApi.retrieve(ap.id, None)

    # wrong user
    account = models.Account()
    account.set_id("asdklfjaioefwe")
    with self.assertRaises(Api404Error):
        a = ArticlesCrudApi.retrieve(ap.id, account)

    # non-existant article
    account = models.Account()
    account.set_id(ap.id)
    with self.assertRaises(Api404Error):
        a = ArticlesCrudApi.retrieve("ijsidfawefwefw", account)
def setUp(self):
    """Create an editor group and journals assigned to it, record forbidden accounts, push a managing-editor context."""
    super(TestTaskJournalBulkEdit, self).setUp()

    self.default_eg = EditorGroupFixtureFactory.setup_editor_group_with_editors()

    # the editor account that owns the test group
    acc = models.Account()
    acc.set_id("0987654321")
    acc.set_email("*****@*****.**")
    acc.save()
    egs = EditorGroupFixtureFactory.make_editor_group_source("1234567890", "0987654321")
    egm = models.EditorGroup(**egs)
    egm.save(blocking=True)

    self.journals = []
    for j_src in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
        self.journals.append(models.Journal(**j_src))
        self.journals[-1].set_editor_group("1234567890")
        self.journals[-1].set_editor("0987654321")
        self.journals[-1].save(blocking=True)

    # roles which should not be permitted to run the bulk edit
    self.forbidden_accounts = [
        AccountFixtureFactory.make_editor_source()['id'],
        AccountFixtureFactory.make_assed1_source()['id'],
        AccountFixtureFactory.make_assed2_source()['id'],
        AccountFixtureFactory.make_assed3_source()['id']
    ]

    self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
def test_08_list_sets(self):
    """ListSets returns the journal's LCC subject sets and those sets can be used to filter ListRecords."""
    journal_source = JournalFixtureFactory.make_journal_source(in_doaj=True)
    j = models.Journal(**journal_source)
    j.save(blocking=True)

    with self.app_test.test_request_context():
        with self.app_test.test_client() as t_client:
            resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListSets', metadataPrefix='oai_dc'))
            assert resp.status_code == 200

            t = etree.fromstring(resp.data)
            records = t.xpath('/oai:OAI-PMH/oai:ListSets', namespaces=self.oai_ns)
            sets = records[0].getchildren()
            assert len(sets) == 2
            set0 = sets[0].getchildren()
            set1 = sets[1].getchildren()
            assert set0[1].text == 'LCC:Economic theory. Demography'
            assert set1[1].text == 'LCC:Social Sciences'

        # check that we can retrieve a record with one of those sets
        with self.app_test.test_client() as t_client:
            resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set=set0[0].text))
            assert resp.status_code == 200

            t = etree.fromstring(resp.data)
            records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns)
            results = records[0].getchildren()
            assert len(results) == 1
def test_11_delete_article_fail(self):
    """Deleting an article fails without an account (401), with the wrong account (404), or a bad id (404)."""
    # set up all the bits we need
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)
    data = ArticleFixtureFactory.make_article_source()

    # call create on the object (which will save it to the index)
    a = ArticlesCrudApi.create(data, account)

    # let the index catch up
    time.sleep(1)

    # call delete on the object in various context that will fail
    # without an account
    with self.assertRaises(Api401Error):
        ArticlesCrudApi.delete(a.id, None)

    # with the wrong account
    account.set_id("other")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.delete(a.id, account)

    # on the wrong id
    account.set_id("test")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.delete("adfasdfhwefwef", account)
def test_03_retrieve_public_journal_success(self):
    """A public (in DOAJ) journal is retrievable anonymously, by its owner, and by any other account."""
    # set up all the bits we need
    data = JournalFixtureFactory.make_journal_source(in_doaj=True, include_obsolete_fields=True)
    j = models.Journal(**data)
    j.save()
    time.sleep(2)

    a = JournalsCrudApi.retrieve(j.id, account=None)

    # check that we got back the object we expected
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id

    # it should also work if we're logged in with the owner or another user
    # owner first
    account = models.Account()
    account.set_id(j.owner)
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    a = JournalsCrudApi.retrieve(j.id, account)
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id

    # try with another account
    not_owner = models.Account()
    not_owner.set_id("asdklfjaioefwe")
    a = JournalsCrudApi.retrieve(j.id, not_owner)
    assert isinstance(a, OutgoingJournal)
    assert a.id == j.id
def setUp(self):
    """Create an editor group and journals assigned to it, record forbidden accounts, push a managing-editor context."""
    super(TestTaskJournalBulkEdit, self).setUp()

    self.default_eg = EditorGroupFixtureFactory.setup_editor_group_with_editors()

    # the editor account that owns the test group
    acc = models.Account()
    acc.set_id("0987654321")
    acc.set_email("*****@*****.**")
    acc.save()
    egs = EditorGroupFixtureFactory.make_editor_group_source("1234567890", "0987654321")
    egm = models.EditorGroup(**egs)
    egm.save(blocking=True)

    self.journals = []
    for j_src in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
        self.journals.append(models.Journal(**j_src))
        self.journals[-1].set_editor_group("1234567890")
        self.journals[-1].set_editor("0987654321")
        self.journals[-1].save(blocking=True)

    # roles which should not be permitted to run the bulk edit
    self.forbidden_accounts = [
        AccountFixtureFactory.make_editor_source()['id'],
        AccountFixtureFactory.make_assed1_source()['id'],
        AccountFixtureFactory.make_assed2_source()['id'],
        AccountFixtureFactory.make_assed3_source()['id']
    ]

    self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
def test_14_create_application_update_request_fail(self):
    """An update request fails with 401 when no account is given and 400 when the payload is malformed."""
    data = ApplicationFixtureFactory.incoming_application()

    # the existing journal targeted by the update request
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner("test")
    journal.save(blocking=True)

    # if the account is dud
    with self.assertRaises(Api401Error):
        a = ApplicationsCrudApi.create(data, None)

    # if the data is bust
    with self.assertRaises(Api400Error):
        account = models.Account()
        account.set_id("test")
        account.set_name("Tester")
        account.set_email("*****@*****.**")
        data = {"some": {"junk": "data"}}
        a = ApplicationsCrudApi.create(data, account)
def test_10_delete_article_success(self):
    """The article owner can delete their article, after which it is no longer retrievable."""
    # set up all the bits we need
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)
    data = ArticleFixtureFactory.make_article_source()

    # call create on the object (which will save it to the index)
    a = ArticlesCrudApi.create(data, account)

    # let the index catch up
    time.sleep(1)

    # now delete it
    ArticlesCrudApi.delete(a.id, account)

    # let the index catch up
    time.sleep(1)

    ap = models.Article.pull(a.id)
    assert ap is None
def test_03_oai_resumption_token(self):
    """ Test the behaviour of the ResumptionToken in the OAI interface"""
    # Set the OAI interface to only return two identifiers at a time
    app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 2
    [j0, j1, j2, j3, j4] = JournalFixtureFactory.make_many_journal_sources(5, in_doaj=True)

    # Save a single journal in the index
    jm0 = models.Journal(**j0)
    jm0.save(blocking=True)

    # ListIdentifiers - we expect no resumptionToken because all results are returned
    with app.test_client() as t_client:
        resp = t_client.get('/oai?verb=ListIdentifiers&metadataPrefix=oai_dc')
        t = etree.fromstring(resp.data)
        assert t.xpath('//oai:identifier', namespaces=self.oai_ns)[0].text == 'oai:doaj.org/journal:journalid0'
        assert t.xpath('//oai:resumptionToken', namespaces=self.oai_ns) == []

    # Populate index with 4 more journals
    for j in [j1, j2, j3, j4]:
        jm = models.Journal(**j)
        jm.save(blocking=True)

    # ListIdentifiers - we expect 5 total results and a resumptionToken to fetch the rest
    with app.test_client() as t_client:
        resp = t_client.get('/oai?verb=ListIdentifiers&metadataPrefix=oai_dc')
        t = etree.fromstring(resp.data)
        rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
        assert rt.get('completeListSize') == '5'
        assert rt.get('cursor') == '2'

        # Get the next result
        resp2 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt.text))
        t = etree.fromstring(resp2.data)
        rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
        assert rt2.get('completeListSize') == '5'
        assert rt2.get('cursor') == '4'

        # And the final result - check we get an empty resumptionToken
        resp3 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt2.text))
        t = etree.fromstring(resp3.data)
        rt3 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0]
        assert rt3.get('completeListSize') == '5'
        assert rt3.get('cursor') == '5'
        assert rt3.text is None

        # We should get an error if we request again with an empty resumptionToken
        resp4 = t_client.get('/oai?verb=ListIdentifiers&resumptionToken={0}'.format(rt3.text))
        assert resp4.status_code == 200  # fixme: should this be a real error code?
        t = etree.fromstring(resp4.data)
        err = t.xpath('//oai:error', namespaces=self.oai_ns)[0]
        assert 'the resumptionToken argument is invalid or expired' in err.text
def test_01_create_articles_success(self):
    """Bulk-create 10 distinct articles and check they all land in the index."""
    def find_dict_in_list(lst, key, value):
        # return the index of the first dict in lst with dic[key] == value, or -1
        for i, dic in enumerate(lst):
            if dic[key] == value:
                return i
        return -1

    # set up all the bits we need - 10 articles
    dataset = []
    for i in range(1, 11):
        data = ArticleFixtureFactory.make_incoming_api_article()
        # change the DOI and fulltext URLs to escape duplicate detection
        # and try with multiple articles
        doi_ix = find_dict_in_list(data['bibjson']['identifier'], 'type', 'doi')
        if doi_ix == -1:
            # NOTE: on a miss we append a stub; indexing with -1 below then
            # addresses exactly that freshly appended entry
            data['bibjson']['identifier'].append({"type": "doi"})
        data['bibjson']['identifier'][doi_ix]['id'] = '10.0000/SOME.IDENTIFIER.{0}'.format(i)
        fulltext_url_ix = find_dict_in_list(data['bibjson']['link'], 'type', 'fulltext')
        if fulltext_url_ix == -1:
            data['bibjson']['link'].append({"type": "fulltext"})
        data['bibjson']['link'][fulltext_url_ix]['url'] = 'http://www.example.com/article_{0}'.format(i)
        dataset.append(deepcopy(data))

    # create an account that we'll do the create as
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    # add a journal to the account
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(2)

    # call create on the object (which will save it to the index)
    ids = ArticlesBulkApi.create(dataset, account)

    # check that we got the right number of ids back
    assert len(ids) == 10
    assert len(list(set(ids))) == 10, len(list(set(ids)))  # are they actually 10 unique IDs?

    # let the index catch up
    time.sleep(2)

    # check that each id was actually created
    for id in ids:
        s = models.Article.pull(id)
        assert s is not None
def test_03b_create_update_request_fail(self):
    """Update-request creation failure modes: missing target journal,
    an exception raised from FormContext.finalise, and validation failure."""
    # update request target not found
    with self.assertRaises(Api404Error):
        data = ApplicationFixtureFactory.incoming_application()
        publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api404Error as e:
            raise

    # if a formcontext exception is raised on finalise
    publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())
    journal.set_owner(publisher.id)
    journal.save(blocking=True)
    formcontext.FormContext.finalise = mock_finalise_exception
    with self.assertRaises(Api400Error):
        data = ApplicationFixtureFactory.incoming_application()
        data["admin"]["current_journal"] = journal.id
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api400Error as e:
            # FIX: Python 3 exceptions have no .message attribute; use str(e)
            # (matches the corrected sibling test elsewhere in this file)
            assert str(e) == "test exception"
            raise
    # restore the real finalise so later tests are unaffected
    formcontext.FormContext.finalise = self.old_finalise

    # validation fails on the formcontext
    publisher = models.Account(**AccountFixtureFactory.make_publisher_source())
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())
    journal.set_owner(publisher.id)
    journal.save(blocking=True)
    IncomingApplication.custom_validate = mock_custom_validate_always_pass
    with self.assertRaises(Api400Error):
        data = ApplicationFixtureFactory.incoming_application()
        # duff submission charges url should trip the validator
        data["bibjson"]["submission_charges_url"] = "not a url!"
        data["admin"]["current_journal"] = journal.id
        try:
            a = ApplicationsCrudApi.create(data, publisher)
        except Api400Error as e:
            raise
def test_13_create_application_update_request_success(self):
    """Successful creation of an update request against an existing, owned journal."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")
    account.save(blocking=True)

    # the journal the update request targets, owned by the account above
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object (which will save it to the index)
    a = ApplicationsCrudApi.create(data, account)

    # check that it got created with the right properties
    assert isinstance(a, models.Suggestion)
    assert a.id != "ignore_me"
    assert a.created_date != "2001-01-01T00:00:00Z"
    assert a.last_updated != "2001-01-01T00:00:00Z"
    assert a.suggester.get("name") == "Tester"  # The suggester should be the owner of the existing journal
    assert a.suggester.get("email") == "*****@*****.**"
    assert a.owner == "test"
    assert a.suggested_on is not None
    # issn ordering is not guaranteed, so accept either permutation
    assert a.bibjson().issns() == ["9999-8888", "7777-6666"] or a.bibjson().issns() == ["7777-6666", "9999-8888"]
    assert a.bibjson().title == "not changed"

    # also, because it's a special case, check the archiving_policy
    archiving_policy = a.bibjson().archiving_policy
    assert len(archiving_policy.get("policy")) == 4
    lcount = 0
    scount = 0
    for ap in archiving_policy.get("policy"):
        if isinstance(ap, list):
            lcount += 1
            assert ap[0] in ["A national library", "Other"]
            assert ap[1] in ["Trinity", "A safe place"]
        else:
            scount += 1
    assert lcount == 2
    assert scount == 2
    assert "CLOCKSS" in archiving_policy.get("policy")
    assert "LOCKSS" in archiving_policy.get("policy")

    time.sleep(2)
    # the suggestion should be retrievable from the index
    s = models.Suggestion.pull(a.id)
    assert s is not None
def test_05_delete_articles_fail(self):
    """Bulk delete must fail without an account, with the wrong account, or with a bad id."""
    # set up all the bits we need
    dataset = []
    for i in range(10):
        data = ArticleFixtureFactory.make_incoming_api_article(
            doi="10.123/test/" + str(i),
            fulltext="http://example.com/" + str(i))
        dataset.append(data)

    # create the main account we're going to work as
    article_owner = models.Account()
    article_owner.set_id("test")
    article_owner.set_name("Tester")
    article_owner.set_email("*****@*****.**")

    # create another account which will own the articles so the one
    # above will be "another user" trying to delete our precious articles.
    somebody_else = models.Account()
    somebody_else.set_id("somebody_else")
    somebody_else.set_name("Somebody Else")
    somebody_else.set_email("*****@*****.**")

    # add a journal to the article owner account to create that link
    # between account and articles
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(article_owner.id)
    journal.save()
    time.sleep(1)

    # call create on the objects (which will save it to the index)
    ids = ArticlesBulkApi.create(dataset, article_owner)

    # let the index catch up
    time.sleep(2)

    # call delete on the object in various context that will fail

    # without an account
    with self.assertRaises(Api401Error):
        ArticlesBulkApi.delete(ids, None)

    # with the wrong account
    article_owner.set_id("other")
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, somebody_else)

    # on the wrong id
    ids.append("adfasdfhwefwef")
    article_owner.set_id("test")
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, article_owner)

    # NOTE(review): this second identical call appears duplicated - presumably
    # verifying the failure is repeatable; confirm intent
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, article_owner)
def test_03_toc_uses_both_issns_when_available(self):
    """The ToC page should surface both ISSNs when the journal has a pISSN and an eISSN."""
    j = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    pissn = j.bibjson().first_pissn
    eissn = j.bibjson().first_eissn
    j.set_last_manual_update()
    j.save(blocking=True)

    # an article in that journal so the ToC has content
    a = models.Article(**ArticleFixtureFactory.make_article_source(pissn=pissn, eissn=eissn, in_doaj=True))
    a.save(blocking=True)

    with self.app_test.test_client() as t_client:
        response = t_client.get('/toc/{}'.format(j.bibjson().get_preferred_issn()))
        assert response.status_code == 200
        # FIX: response.data is bytes under Python 3; a str `in` bytes check
        # raises TypeError, so decode the body before the substring test
        assert 'var toc_issns = ["{pissn}","{eissn}"];'.format(pissn=pissn, eissn=eissn) in response.data.decode("utf-8")
def test_11_iterate(self):
    """Journal.iterate should page through every in-DOAJ journal exactly once."""
    for jsrc in JournalFixtureFactory.make_many_journal_sources(count=99, in_doaj=True):
        j = models.Journal(**jsrc)
        j.save()
    time.sleep(2)  # index all the journals

    # collect every id seen while iterating in pages of 10
    journal_ids = []
    theqgen = models.JournalQuery()
    for j in models.Journal.iterate(q=theqgen.all_in_doaj(), page_size=10):
        journal_ids.append(j.id)
    journal_ids = list(set(journal_ids[:]))  # keep only unique ids

    assert len(journal_ids) == 99
    assert len(self.list_today_journal_history_files()) == 99
def test_02_update_request(self):
    """Accepting an update request overwrites the journal in place and writes a history snapshot."""
    acc = models.Account()
    acc.set_id("richard")
    acc.add_role("admin")
    ctx = self._make_and_push_test_context(acc=acc)

    # There needs to be an existing journal in the index for this test to work
    jsource = JournalFixtureFactory.make_journal_source()
    del jsource["admin"]["related_applications"]
    extant_j = models.Journal(**jsource)
    assert extant_j.last_update_request is None
    extant_j_created_date = extant_j.created_date
    extant_j.save()
    time.sleep(1)

    # We've added one journal, so there'll be one snapshot already
    assert models.Journal.count() == 1
    h = self.list_today_journal_history_files()
    assert len(h) == 1

    # set up an application which is an update on an existing journal
    s = models.Suggestion(**APPLICATION_SOURCE)
    s.set_current_journal("abcdefghijk_journal")
    s.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)

    # set up the form which "accepts" this update request
    fd = deepcopy(APPLICATION_FORM)
    fd["application_status"] = constants.APPLICATION_STATUS_ACCEPTED
    fd = MultiDict(fd)

    # create and finalise the form context
    fc = formcontext.ApplicationFormFactory.get_form_context(role="admin", form_data=fd, source=s)
    # with app.test_request_context():
    fc.finalise()

    # let the index catch up
    time.sleep(1)

    # the journal keeps its id and created date, gains a last_update_request,
    # and a second history snapshot is written
    j = models.Journal.pull("abcdefghijk_journal")
    assert j is not None
    assert j.created_date == extant_j_created_date
    assert j.last_update_request is not None
    assert models.Journal.count() == 1
    h = self.list_today_journal_history_files()
    assert h is not None
    assert len(h) == 2

    ctx.pop()
def test_01_create_articles_success(self):
    """Bulk-create 10 distinct articles and check each is saved to the index."""
    def find_dict_in_list(lst, key, value):
        # index of the first dict in lst where dic[key] == value, else -1
        for i, dic in enumerate(lst):
            if dic[key] == value:
                return i
        return -1

    # set up all the bits we need - 10 articles
    dataset = []
    for i in range(1, 11):
        data = ArticleFixtureFactory.make_incoming_api_article()
        # change the DOI and fulltext URLs to escape duplicate detection
        # and try with multiple articles
        doi_ix = find_dict_in_list(data['bibjson']['identifier'], 'type', 'doi')
        if doi_ix == -1:
            # on a miss we append a stub; index -1 below then addresses that new entry
            data['bibjson']['identifier'].append({"type" : "doi"})
        data['bibjson']['identifier'][doi_ix]['id'] = '10.0000/SOME.IDENTIFIER.{0}'.format(i)
        fulltext_url_ix = find_dict_in_list(data['bibjson']['link'], 'type', 'fulltext')
        if fulltext_url_ix == -1:
            data['bibjson']['link'].append({"type" : "fulltext"})
        data['bibjson']['link'][fulltext_url_ix]['url'] = 'http://www.example.com/article_{0}'.format(i)
        dataset.append(deepcopy(data))

    # create an account that we'll do the create as
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    # add a journal to the account
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(2)

    # call create on the object (which will save it to the index)
    ids = ArticlesBulkApi.create(dataset, account)

    # check that we got the right number of ids back
    assert len(ids) == 10
    assert len(list(set(ids))) == 10, len(list(set(ids)))  # are they actually 10 unique IDs?

    # let the index catch up
    time.sleep(2)

    # check that each id was actually created
    for id in ids:
        s = models.Article.pull(id)
        assert s is not None
def test_03c_update_update_request_fail(self):
    """An update request against a journal in a disallowed status is rejected with a 404."""
    # index a journal that is not associated with the requesting publisher
    target = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    target.set_id(target.makeid())
    target.save(blocking=True)

    with self.assertRaises(Api404Error):
        incoming = ApplicationFixtureFactory.incoming_application()
        incoming["admin"]["current_journal"] = target.id
        requester = models.Account(**AccountFixtureFactory.make_publisher_source())
        try:
            ApplicationsCrudApi.create(incoming, requester)
        except Api404Error:
            raise
def test_04_timeout(self):
    """A lock taken with an explicit timeout must expire after now + (timeout - margin)."""
    journal = models.Journal(**JournalFixtureFactory.make_journal_source())
    journal.save()
    time.sleep(2)

    # compute the lower bound for expiry before acquiring the lock
    lower_bound = datetime.utcnow() + timedelta(seconds=2300)

    # set a lock with a longer timout
    the_lock = lock.lock("journal", journal.id, "testuser", 2400)
    assert dates.parse(the_lock.expires) > lower_bound
def test_11_iterate(self):
    """Journal.iterate pages through all in-DOAJ journals; every one is seen exactly once."""
    sources = JournalFixtureFactory.make_many_journal_sources(count=99, in_doaj=True)
    for src in sources:
        models.Journal(**src).save()
    time.sleep(2)  # index all the journals

    qgen = models.JournalQuery()
    seen_ids = [record.id for record in models.Journal.iterate(q=qgen.all_in_doaj(), page_size=10)]

    # keep only unique ids
    unique_ids = list(set(seen_ids))
    assert len(unique_ids) == 99
    assert len(self.list_today_journal_history_files()) == 99
def test_08_sync_owners(self):
    """Owner synchronisation between a suggestion and its linked journal, in both directions."""
    # suggestion with no current_journal
    s = models.Suggestion(**ApplicationFixtureFactory.make_application_source())
    s.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s is not None

    # journal with no current_application
    j = models.Journal(**JournalFixtureFactory.make_journal_source())
    j.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j is not None

    # suggestion with erroneous current_journal
    s.set_current_journal("asdklfjsadjhflasdfoasf")
    s.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s is not None

    # journal with erroneous current_application
    j.set_current_application("kjwfuiwqhu220952gw")
    j.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j is not None

    # suggestion with journal: owner change propagates to the journal on save
    s.set_owner("my_new_owner")
    s.set_current_journal(j.id)
    s.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j.owner == "my_new_owner"

    # journal with suggestion: owner change propagates to the suggestion on save
    j.set_owner("another_new_owner")
    j.set_current_application(s.id)
    j.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s.owner == "another_new_owner"
def test_05_delete_articles_fail(self):
    """Bulk delete fails with 401 (no account) and 400 (wrong account / bad id)."""
    # set up all the bits we need
    dataset = []
    for i in range(10):
        data = ArticleFixtureFactory.make_incoming_api_article(doi="10.123/test/" + str(i), fulltext="http://example.com/" + str(i))
        dataset.append(data)

    # create the main account we're going to work as
    article_owner = models.Account()
    article_owner.set_id("test")
    article_owner.set_name("Tester")
    article_owner.set_email("*****@*****.**")

    # create another account which will own the articles so the one
    # above will be "another user" trying to delete our precious articles.
    somebody_else = models.Account()
    somebody_else.set_id("somebody_else")
    somebody_else.set_name("Somebody Else")
    somebody_else.set_email("*****@*****.**")

    # add a journal to the article owner account to create that link
    # between account and articles
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(article_owner.id)
    journal.save()
    time.sleep(1)

    # call create on the objects (which will save it to the index)
    ids = ArticlesBulkApi.create(dataset, article_owner)

    # let the index catch up
    time.sleep(2)

    # call delete on the object in various context that will fail

    # without an account
    with self.assertRaises(Api401Error):
        ArticlesBulkApi.delete(ids, None)

    # with the wrong account
    article_owner.set_id("other")
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, somebody_else)

    # on the wrong id
    ids.append("adfasdfhwefwef")
    article_owner.set_id("test")
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, article_owner)

    # NOTE(review): duplicated call - presumably checks the failure repeats; confirm intent
    with self.assertRaises(Api400Error):
        ArticlesBulkApi.delete(ids, article_owner)
def test_08_sync_owners(self):
    """Owner changes propagate between linked suggestion and journal on save."""
    # suggestion with no current_journal
    s = models.Suggestion(**ApplicationFixtureFactory.make_application_source())
    s.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s is not None

    # journal with no current_application
    j = models.Journal(**JournalFixtureFactory.make_journal_source())
    j.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j is not None

    # suggestion with erroneous current_journal
    s.set_current_journal("asdklfjsadjhflasdfoasf")
    s.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s is not None

    # journal with erroneous current_application
    j.set_current_application("kjwfuiwqhu220952gw")
    j.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j is not None

    # suggestion with journal: new owner is copied onto the journal
    s.set_owner("my_new_owner")
    s.set_current_journal(j.id)
    s.save()
    models.Journal.refresh()
    j = models.Journal.pull(j.id)
    assert j.owner == "my_new_owner"

    # journal with suggestion: new owner is copied onto the suggestion
    j.set_owner("another_new_owner")
    j.set_current_application(s.id)
    j.save()
    models.Suggestion.refresh()
    s = models.Suggestion.pull(s.id)
    assert s.owner == "another_new_owner"
def test_05_toc_correctly_uses_eissn(self):
    """When only an eISSN remains on the journal, the ToC must render that single ISSN."""
    j = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    eissn = j.bibjson().first_eissn
    # remove pissn
    j.bibjson().remove_identifiers(idtype=j.bibjson().P_ISSN, id=j.bibjson().first_pissn)
    j.set_last_manual_update()
    j.save(blocking=True)

    # an article in that journal so the ToC has content
    a = models.Article(**ArticleFixtureFactory.make_article_source(pissn=eissn, in_doaj=True))
    a.save(blocking=True)

    with self.app_test.test_client() as t_client:
        response = t_client.get('/toc/{}'.format(j.bibjson().get_preferred_issn()))
        assert response.status_code == 200
        # FIX: response.data is bytes under Python 3; decode before the
        # substring check, otherwise `str in bytes` raises TypeError
        assert 'var toc_issns = ["{eissn}"];'.format(eissn=eissn) in response.data.decode("utf-8")
def setUp(self):
    """Create two journals (one publisher-owned) and three articles for the create/update tests."""
    super(TestCreateOrUpdateArticle, self).setUp()

    # a publisher account and an admin account
    self.publisher = Account()
    self.publisher.add_role("publisher")
    self.publisher.save(blocking=True)
    self.admin = Account()
    self.admin.add_role("admin")
    self.admin.save(blocking=True)

    sources = JournalFixtureFactory.make_many_journal_sources(2, True)

    # journal1: owned by the publisher, ISSNs 1111-1111 / 2222-2222
    self.journal1 = Journal(**sources[0])
    self.journal1.set_owner(self.publisher.id)
    jbib1 = self.journal1.bibjson()
    jbib1.add_identifier(jbib1.P_ISSN, "1111-1111")
    jbib1.add_identifier(jbib1.E_ISSN, "2222-2222")
    self.journal1.save(blocking=True)
    self.publisher.add_journal(self.journal1)

    # journal2: no owner set, ISSNs 1234-5678 / 9876-5432
    self.journal2 = Journal(**sources[1])
    jbib2 = self.journal2.bibjson()
    jbib2.add_identifier(jbib2.P_ISSN, "1234-5678")
    jbib2.add_identifier(jbib2.E_ISSN, "9876-5432")
    self.journal2.save(blocking=True)

    # two articles in journal1
    self.article10 = Article(**ArticleFixtureFactory.make_article_source(
        pissn="1111-1111", eissn="2222-2222",
        doi="10.0000/article-10", fulltext="https://www.article10.com"))
    self.article10.set_id("articleid10")
    self.article10.save(blocking=True)

    self.article11 = Article(**ArticleFixtureFactory.make_article_source(
        pissn="1111-1111", eissn="2222-2222",
        doi="10.0000/article-11", fulltext="https://www.article11.com"))
    self.article11.set_id("articleid11")
    self.article11.save(blocking=True)

    # and one article in journal2
    self.article2 = Article(**ArticleFixtureFactory.make_article_source(
        pissn="1234-5678", eissn="9876-5432",
        doi="10.0000/article-2", fulltext="https://www.article2.com"))
    self.article2.set_id("articleid2")
    self.article2.save(blocking=True)
def test_05_date_ranges(self): """ Check that the interface adheres to the dates that records were added """ # Set the OAI interface to only return one identifier at a time app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = 1 journals = JournalFixtureFactory.make_many_journal_sources(4, in_doaj=True) now = datetime.utcnow() yesterday = datetime.utcnow() - timedelta(days=1) day_before_yesterday = datetime.utcnow() - timedelta(days=2) two_days_before_yesterday = datetime.utcnow() - timedelta(days=3) # Save half of our journals 2 days ago with freeze_time(day_before_yesterday): for j in journals[:2]: jm = models.Journal(**j) jm.save(blocking=True) # Save the other half of our journals today with freeze_time(now): for j in journals[2:]: jm = models.Journal(**j) jm.save(blocking=True) # Request OAI journals since yesterday (looking for today's results only) with app.test_client() as t_client: resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}'.format(yesterday.strftime('%Y-%m-%d'))) t = etree.fromstring(resp.data) #print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '2' assert rt.get('cursor') == '1' for title in t.xpath('//dc:title', namespaces=self.oai_ns): assert title.text in [journals[2]['bibjson']['title'], journals[3]['bibjson']['title']] # Request OAI journals from 3 days ago to yesterday (expecting the 2 days ago results) with app.test_client() as t_client: resp = t_client.get('/oai?verb=ListRecords&metadataPrefix=oai_dc&from={0}&until={1}'.format( two_days_before_yesterday.strftime('%Y-%m-%d'), yesterday.strftime('%Y-%m-%d'))) t = etree.fromstring(resp.data) #print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '2' assert rt.get('cursor') == '1' for title in t.xpath('//dc:title', namespaces=self.oai_ns): assert title.text in 
[journals[0]['bibjson']['title'], journals[1]['bibjson']['title']]
def test_03_batch_lock_unlock(self):
    """Batch lock/unlock succeeds across a set of journals, and batch lock is
    all-or-nothing when any one record is already locked by another user."""
    source = JournalFixtureFactory.make_journal_source()

    # create a bunch of journals that we can play with
    # (loop replaces five identical copy-pasted creation stanzas)
    ids = []
    for _ in range(5):
        j = models.Journal(**deepcopy(source))
        j.save()
        ids.append(j.id)
    time.sleep(2)

    # batch lock then batch unlock all five as the same user
    ls = lock.batch_lock("journal", ids, "testuser")
    assert len(ls) == 5
    time.sleep(2)
    report = lock.batch_unlock("journal", ids, "testuser")
    assert len(report["success"]) == 5
    assert len(report["fail"]) == 0
    time.sleep(2)

    # now lock an individual record by a different user and check that no locks are set
    # in batch
    l = lock.lock("journal", ids[3], "otheruser")
    time.sleep(2)
    with self.assertRaises(lock.Locked):
        ls = lock.batch_lock("journal", ids, "testuser")
    for id in ids:
        assert lock.has_lock("journal", id, "testuser") is False
def setUp(self):
    """Create an API-enabled publisher account and one journal it owns."""
    super(TestCrudReturnValues, self).setUp()

    owner = models.Account.make_account(username="******", name="Tester",
                                        email="*****@*****.**",
                                        roles=["publisher", "api"],
                                        associated_journal_ids=['abcdefghijk_journal'])
    owner.set_password('password123')
    # keep the api key for authenticated requests in the tests
    self.api_key = owner.api_key
    owner.save()

    owned_journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    owned_journal.set_owner(owner.id)
    owned_journal.save()
    time.sleep(1)
def test_01_outgoing_journal_do(self):
    """Outgoing journal serialisation must not expose internal/admin-only fields."""
    # a blank outgoing journal can be constructed without error
    outgoing = OutgoingJournal()

    # build one from an incoming journal model fixture
    src = JournalFixtureFactory.make_journal_source(include_obsolete_fields=True)
    outgoing = OutgoingJournal.from_model(models.Journal(**src))

    # check that it does not contain information that it shouldn't
    assert outgoing.data.get("index") is None
    assert outgoing.data.get("history") is None
    admin_section = outgoing.data.get("admin", {})
    for hidden_field in ("active", "notes", "editor_group", "editor"):
        assert admin_section.get(hidden_field) is None
def test_16_update_application_update_request_success(self):
    """Updating an update request applies changes but keeps the journal's title."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()
    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")

    # the journal the update request targets, owned by the account above
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object (which will save it to the index)
    a = ApplicationsCrudApi.create(data, account)

    # let the index catch up
    time.sleep(2)

    # get a copy of the newly created version for use in assertions later
    created = models.Suggestion.pull(a.id)

    # now make an updated version of the object
    data = ApplicationFixtureFactory.incoming_application()
    data["bibjson"]["title"] = "An updated title"
    data["bibjson"]["publisher"] = "An updated publisher"

    # call update on the object
    a2 = ApplicationsCrudApi.update(a.id, data, account)
    assert a2 != a

    # let the index catch up
    time.sleep(2)

    # get a copy of the updated version
    updated = models.Suggestion.pull(a.id)

    # now check the properties to make sure the update tool
    # (the title stays as the journal's; the publisher change is applied)
    assert updated.bibjson().title == "not changed"
    assert updated.bibjson().publisher == "An updated publisher"
    assert updated.created_date == created.created_date
def test_05_anonymise_admin_with_notes(self):
    """_anonymise_admin replaces contact names and note text, leaving other admin data intact."""
    journal_src = JournalFixtureFactory.make_journal_source()
    journal_src['admin'] = {
        'owner': 'testuser',
        'editor': 'testeditor',
        'contact': [{
            'email': '*****@*****.**',
            'name': 'Tester Tester'
        }],
        'notes': [
            {
                'note': 'Test note',
                'date': '2017-02-23T00:00:00Z'
            },
            {
                'note': 'Test note 2',
                'date': '2017-02-23T00:00:00Z'
            }
        ]
    }
    journal = models.Journal(**journal_src)

    # freeze time so the anonymised output is deterministic
    with freeze_time("2017-02-23"):
        ar = anon_export._anonymise_admin(journal)
        # contact name is replaced and each note becomes a fixed hash;
        # owner/editor and note dates are untouched
        assert ar.data['admin'] == {
            'owner': 'testuser',
            'editor': 'testeditor',
            'contact': [{
                'email': '*****@*****.**',
                'name': 'Ryan Gallagher'
            }],
            'notes': [
                {
                    'note': 'f4007b0953d4a9ecb7e31820b5d481d96ee5d74a0a059a54f07a326d357ed895',
                    'date': '2017-02-23T00:00:00Z'
                },
                {
                    'note': '772cf6f91219db969e4aa28e4fd606b92316948545ad528fd34feb1b9b12a3ad',
                    'date': '2017-02-23T00:00:00Z'
                }
            ]
        }, ar['admin']