def test_08_sync_owners(self):
    """Saving a record should propagate its owner to the linked record
    (suggestion -> current_journal and journal -> current_application),
    tolerating absent or dangling links."""
    # suggestion with no current_journal survives a save/refresh round trip
    sug = models.Suggestion(**ApplicationFixtureFactory.make_application_source())
    sug.save()
    models.Suggestion.refresh()
    sug = models.Suggestion.pull(sug.id)
    assert sug is not None

    # journal with no current_application
    jrn = models.Journal(**JournalFixtureFactory.make_journal_source())
    jrn.save()
    models.Journal.refresh()
    jrn = models.Journal.pull(jrn.id)
    assert jrn is not None

    # suggestion pointing at a journal id that does not exist
    sug.set_current_journal("asdklfjsadjhflasdfoasf")
    sug.save()
    models.Suggestion.refresh()
    sug = models.Suggestion.pull(sug.id)
    assert sug is not None

    # journal pointing at an application id that does not exist
    jrn.set_current_application("kjwfuiwqhu220952gw")
    jrn.save()
    models.Journal.refresh()
    jrn = models.Journal.pull(jrn.id)
    assert jrn is not None

    # suggestion with a real journal: owner syncs onto the journal
    sug.set_owner("my_new_owner")
    sug.set_current_journal(jrn.id)
    sug.save()
    models.Journal.refresh()
    jrn = models.Journal.pull(jrn.id)
    assert jrn.owner == "my_new_owner"

    # journal with a real suggestion: owner syncs onto the suggestion
    jrn.set_owner("another_new_owner")
    jrn.set_current_application(sug.id)
    jrn.save()
    models.Suggestion.refresh()
    sug = models.Suggestion.pull(sug.id)
    assert sug.owner == "another_new_owner"
def test_02_update_request(self):
    """Accepting an update request should update the extant journal in place,
    preserving its created date and writing a second history snapshot."""
    acc = models.Account()
    acc.set_id("richard")
    acc.add_role("admin")
    ctx = self._make_and_push_test_context(acc=acc)

    # There needs to be an existing journal in the index for this test to work
    jsource = JournalFixtureFactory.make_journal_source()
    del jsource["admin"]["related_applications"]
    extant_j = models.Journal(**jsource)
    assert extant_j.last_update_request is None
    extant_j_created_date = extant_j.created_date
    extant_j.save()
    time.sleep(1)

    # We've added one journal, so there'll be one snapshot already
    assert models.Journal.count() == 1
    h = self.list_today_journal_history_files()
    assert len(h) == 1

    # set up an application which is an update on an existing journal
    s = models.Suggestion(**APPLICATION_SOURCE)
    s.set_current_journal("abcdefghijk_journal")
    s.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)

    # set up the form which "accepts" this update request
    fd = deepcopy(APPLICATION_FORM)
    fd["application_status"] = constants.APPLICATION_STATUS_ACCEPTED
    fd = MultiDict(fd)

    # create and finalise the form context
    fc = formcontext.ApplicationFormFactory.get_form_context(role="admin", form_data=fd, source=s)
    # with app.test_request_context():
    fc.finalise()

    # let the index catch up
    time.sleep(1)

    j = models.Journal.pull("abcdefghijk_journal")
    assert j is not None
    # the journal was updated, not replaced
    assert j.created_date == extant_j_created_date
    assert j.last_update_request is not None
    assert models.Journal.count() == 1

    # a second snapshot should have been written
    h = self.list_today_journal_history_files()
    assert h is not None
    assert len(h) == 2

    ctx.pop()
def test_05_toc_correctly_uses_eissn(self):
    """The ToC page should fall back to the eISSN when the journal has no pISSN."""
    j = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    eissn = j.bibjson().first_eissn
    # remove pissn so only the eissn remains
    j.bibjson().remove_identifiers(idtype=j.bibjson().P_ISSN, id=j.bibjson().first_pissn)
    j.set_last_manual_update()
    j.save(blocking=True)

    a = models.Article(**ArticleFixtureFactory.make_article_source(pissn=eissn, in_doaj=True))
    a.save(blocking=True)

    with self.app_test.test_client() as t_client:
        response = t_client.get('/toc/{}'.format(j.bibjson().get_preferred_issn()))
        assert response.status_code == 200
        # FIX: response.data is bytes under Python 3; decode it before the
        # substring check, otherwise `str in bytes` raises TypeError
        assert 'var toc_issns = ["{eissn}"];'.format(eissn=eissn) in response.data.decode("utf-8")
def test_30_article_journal_sync(self):
    """add_journal_metadata copies journal-level fields onto the article and
    reports whether anything changed."""
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    article = models.Article(**ArticleFixtureFactory.make_article_source(in_doaj=False, with_journal_info=False))

    # article starts out without the journal's metadata
    assert article.has_seal() is False
    assert article.bibjson().journal_issns != journal.bibjson().issns()

    # first sync: metadata is copied across and the change is reported
    reg = models.Journal()
    changed = article.add_journal_metadata(journal, reg)
    assert changed is True
    assert article.has_seal() is True
    assert article.is_in_doaj() is True
    assert article.bibjson().journal_issns == journal.bibjson().issns()
    assert article.bibjson().publisher == journal.bibjson().publisher
    assert article.bibjson().journal_country == journal.bibjson().country
    assert article.bibjson().journal_language == journal.bibjson().language
    assert article.bibjson().journal_title == journal.bibjson().title

    # second sync is a no-op
    changed = article.add_journal_metadata(journal)
    assert changed is False
def test_05_best_journal(self):
    """_get_best_journal prefers in-DOAJ journals, then manually-updated ones."""
    j1 = models.Journal()
    j1.set_in_doaj(True)
    j1.set_last_manual_update("2001-01-01T00:00:00Z")
    j1.save()

    j2 = models.Journal()
    j2.set_in_doaj(True)
    j2.save()

    j3 = models.Journal()
    j3.set_in_doaj(False)
    j3.set_last_manual_update("2002-01-01T00:00:00Z")
    j3.save()

    j4 = models.Journal()
    j4.set_in_doaj(False)
    j4.save(blocking=True)

    job = article_cleanup_sync.ArticleCleanupSyncBackgroundTask.prepare("testuser")
    task = article_cleanup_sync.ArticleCleanupSyncBackgroundTask(job)

    # the degenerate case, when there's only one journal
    assert task._get_best_journal([j1]).id == j1.id

    # should return in doaj over not in doaj
    assert task._get_best_journal([j1, j3]).id == j1.id

    # should return manually updated over not manually updated
    assert task._get_best_journal([j1, j2]).id == j1.id
    assert task._get_best_journal([j3, j4]).id == j3.id
def test_02_oai_journals(self):
    """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ"""
    journal_sources = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True)
    j_public = models.Journal(**journal_sources[0])
    j_public.save(blocking=True)
    public_id = j_public.id

    j_private = models.Journal(**journal_sources[1])
    j_private.set_in_doaj(False)
    j_private.save(blocking=True)

    with self.app_test.test_request_context(), self.app_test.test_client() as t_client:
        # ListRecords must only expose the in-DOAJ journal
        resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc'))
        assert resp.status_code == 200

        t = etree.fromstring(resp.data)
        records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns)
        # Check we only have one journal returned
        assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 1
        # Check we have the correct journal
        assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title

        # GetRecord for the public journal also works
        resp = t_client.get(url_for('oaipmh.oaipmh', verb='GetRecord', metadataPrefix='oai_dc') + '&identifier={0}'.format(public_id))
        assert resp.status_code == 200

        t = etree.fromstring(resp.data)
        records = t.xpath('/oai:OAI-PMH/oai:GetRecord', namespaces=self.oai_ns)
        # Check we only have one journal returned
        assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 1
        # Check we have the correct journal
        assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title
def test_03c_update_update_request_fail(self):
    """Creating an update request against a journal in a disallowed status
    raises Api404Error."""
    # update request target in disallowed status
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_id(journal.makeid())
    journal.save(blocking=True)

    # FIX: fixture setup moved out of the assertRaises context so an error
    # there cannot satisfy (or obscure) the expected exception; the redundant
    # try/except that only re-raised has been removed.
    data = ApplicationFixtureFactory.incoming_application()
    data["admin"]["current_journal"] = journal.id
    publisher = models.Account(**AccountFixtureFactory.make_publisher_source())

    with self.assertRaises(Api404Error):
        ApplicationsCrudApi.create(data, publisher)
def test_02_journal_model_rw(self):
    """Read and write properties into the journal model"""
    j = models.Journal()
    j.set_current_application("1234567")
    j.set_last_reapplication("2001-04-19T01:01:01Z")
    j.set_bulk_upload_id("abcdef")

    assert j.data.get("admin", {}).get("current_application") == "1234567"
    assert j.current_application == "1234567"
    assert j.last_reapplication == "2001-04-19T01:01:01Z"
    assert j.bulk_upload_id == "abcdef"

    # FIX: comparing against a single pre-call timestamp was flaky (the old
    # comment admitted it "might sometimes fail" when the second ticked over).
    # Bracket the call instead: ISO-8601 "YYYY-MM-DDTHH:MM:SSZ" strings
    # compare correctly lexicographically.
    before = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    j.set_last_reapplication()
    after = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
    assert before <= j.last_reapplication <= after
def test_01_journal2questions(self):
    """Crosswalk a journal into spreadsheet questions/answers and compare
    with the fixture's expected answers."""
    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.prep()
    q_and_a = Journal2QuestionXwalk.journal2question(journal)
    answers = [str(x[1]) for x in q_and_a]
    expected = JournalFixtureFactory.question_answers()

    # print any mismatch to ease debugging before the final assertion.
    # FIX: iterate with enumerate instead of range(len(...)), and drop the
    # doubled parentheses left over from a 2to3 conversion of print.
    for i, a in enumerate(answers):
        if a != expected[i]:
            print("{c} = {a} | {b}".format(a=a, b=expected[i], c=q_and_a[i]))

    assert answers == expected
def test_06_identify(self):
    """The OAI-PMH Identify verb reports the repository name, admin email
    and timestamp granularity."""
    journal_source = JournalFixtureFactory.make_journal_source(in_doaj=True)
    j = models.Journal(**journal_source)
    j.save(blocking=True)

    with self.app_test.test_request_context(), self.app_test.test_client() as t_client:
        resp = t_client.get(url_for('oaipmh.oaipmh', verb='Identify', metadataPrefix='oai_dc'))
        assert resp.status_code == 200

        t = etree.fromstring(resp.data)
        records = t.xpath('/oai:OAI-PMH/oai:Identify', namespaces=self.oai_ns)
        assert len(records) == 1
        assert records[0].xpath('//oai:repositoryName', namespaces=self.oai_ns)[0].text == 'Directory of Open Access Journals'
        assert records[0].xpath('//oai:adminEmail', namespaces=self.oai_ns)[0].text == '*****@*****.**'
        assert records[0].xpath('//oai:granularity', namespaces=self.oai_ns)[0].text == 'YYYY-MM-DDThh:mm:ssZ'
def test_03_toc_uses_both_issns_when_available(self):
    """The ToC page should embed both the pISSN and eISSN when the journal has both."""
    j = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    pissn = j.bibjson().first_pissn
    eissn = j.bibjson().first_eissn
    j.set_last_manual_update()
    j.save()

    a = models.Article(**ArticleFixtureFactory.make_article_source(pissn=pissn, eissn=eissn, in_doaj=True))
    a.save(blocking=True)

    with self.app_test.test_client() as t_client:
        response = t_client.get('/toc/{}'.format(j.bibjson().get_preferred_issn()))
        assert response.status_code == 200
        # FIX: response.data is bytes under Python 3; decode it before the
        # substring check, otherwise `str in bytes` raises TypeError
        assert 'var toc_issns = ["{pissn}","{eissn}"];'.format(pissn=pissn, eissn=eissn) in response.data.decode("utf-8")
def setUp(self):
    """Create a publisher/API account with an owned in-DOAJ journal for the
    CRUD return-value tests."""
    super(TestCrudReturnValues, self).setUp()

    account = models.Account.make_account(username="******",
                                          name="Tester",
                                          email="*****@*****.**",
                                          roles=["publisher", "api"],
                                          associated_journal_ids=['abcdefghijk_journal'])
    account.set_password('password123')
    self.api_key = account.api_key
    account.save()

    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()

    # let the index catch up
    time.sleep(1)
def test_01_object(self):
    """Atom feed construction: empty on a bare index, then five journals in
    last-updated order with the expected entry fields."""
    # first try requesting a feed over the empty test index
    f = atom.get_feed("http://my.test.com")
    assert len(f.entries.keys()) == 0
    assert f.url == "http://my.test.com"

    # now populate the index and then re-get the feed
    ids = []
    for i in range(5):
        j = models.Journal()
        j.set_in_doaj(True)
        bj = j.bibjson()
        bj.title = "Test Journal {x}".format(x=i)
        bj.add_identifier(bj.P_ISSN, "{x}000-0000".format(x=i))
        bj.publisher = "Test Publisher {x}".format(x=i)
        bj.add_subject("LCC", "Agriculture")
        bj.add_url("http://homepage.com/{x}".format(x=i), "homepage")
        j.save()
        ids.append(j.id)
        # make sure the last updated dates are suitably different
        time.sleep(1)
    time.sleep(1)

    with self.app_test.test_request_context('/feed'):
        f = atom.get_feed("http://my.test.com")
        assert len(f.entries.keys()) == 5

        # now go through the entries in order, and check they are as expected.
        # FIX: dict.keys() returns a view in Python 3 which has no .sort();
        # use sorted() to get an ordered list.
        entry_dates = sorted(f.entries.keys())
        for i in range(5):
            e = f.entries.get(entry_dates[i])[0]
            assert e["author"] == "Test Publisher {x}".format(x=i)
            assert len(e["categories"]) == 1
            assert e["categories"][0] == "LCC:Agriculture"
            assert e["content_src"].endswith("{x}000-0000?rss".format(x=i))
            assert e["alternate"].endswith("{x}000-0000?rss".format(x=i))
            assert e["id"] == "urn:uuid:" + ids[i]
            assert e["related"] == "http://homepage.com/{x}".format(x=i)
            assert "rights" in e
            assert e["summary"].startswith("Published by Test Publisher {x}".format(x=i))
            assert e["title"] == "Test Journal {x} ({x}000-0000)".format(x=i)
            assert "updated" in e
def test_16_update_application_update_request_success(self):
    """Updating an update request succeeds; title stays pinned to the journal's
    but the publisher change goes through, and created_date is preserved."""
    # set up all the bits we need
    data = ApplicationFixtureFactory.incoming_application()

    account = models.Account()
    account.set_id("test")
    account.set_name("Tester")
    account.set_email("*****@*****.**")
    account.add_role("publisher")

    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.bibjson().remove_identifiers()
    journal.bibjson().add_identifier(journal.bibjson().E_ISSN, "9999-8888")
    journal.bibjson().add_identifier(journal.bibjson().P_ISSN, "7777-6666")
    journal.bibjson().title = "not changed"
    journal.set_id(data["admin"]["current_journal"])
    journal.set_owner(account.id)
    journal.save(blocking=True)

    # call create on the object (which will save it to the index)
    a = ApplicationsCrudApi.create(data, account)

    # let the index catch up
    time.sleep(2)

    # get a copy of the newly created version for use in assertions later
    created = models.Suggestion.pull(a.id)

    # now make an updated version of the object
    data = ApplicationFixtureFactory.incoming_application()
    data["bibjson"]["title"] = "An updated title"
    data["bibjson"]["publisher"] = "An updated publisher"

    # call update on the object
    a2 = ApplicationsCrudApi.update(a.id, data, account)
    assert a2 != a

    # let the index catch up
    time.sleep(2)

    # get a copy of the updated version
    updated = models.Suggestion.pull(a.id)

    # now check the properties to make sure the update took
    assert updated.bibjson().title == "not changed"
    assert updated.bibjson().publisher == "An updated publisher"
    assert updated.created_date == created.created_date
def test_01_outgoing_journal_do(self):
    """OutgoingJournal strips internal/administrative fields from the model."""
    # make a blank one successfully
    oj = OutgoingJournal()

    # make one from an incoming journal model fixture
    data = JournalFixtureFactory.make_journal_source(include_obsolete_fields=True)
    j = models.Journal(**data)
    oj = OutgoingJournal.from_model(j)

    # check that it does not contain information that it shouldn't
    admin = oj.data.get("admin", {})
    assert oj.data.get("index") is None
    assert oj.data.get("history") is None
    assert admin.get("active") is None
    assert admin.get("notes") is None
    assert admin.get("editor_group") is None
    assert admin.get("editor") is None
def test_05_anonymise_admin_with_notes(self):
    """_anonymise_admin replaces contact names and hashes note text, leaving
    owner/editor and note dates intact."""
    journal_src = JournalFixtureFactory.make_journal_source()
    journal_src['admin'] = {
        'owner': 'testuser',
        'editor': 'testeditor',
        'contact': [{
            'email': '*****@*****.**',
            'name': 'Tester Tester'
        }],
        'notes': [
            {'note': 'Test note', 'date': '2017-02-23T00:00:00Z'},
            {'note': 'Test note 2', 'date': '2017-02-23T00:00:00Z'}
        ]
    }
    journal = models.Journal(**journal_src)

    # pin the clock so anonymisation output is deterministic
    with freeze_time("2017-02-23"):
        ar = anon_export._anonymise_admin(journal)

    assert ar.data['admin'] == {
        'owner': 'testuser',
        'editor': 'testeditor',
        'contact': [{
            'email': '*****@*****.**',
            'name': 'Ryan Gallagher'
        }],
        'notes': [
            {'note': 'f4007b0953d4a9ecb7e31820b5d481d96ee5d74a0a059a54f07a326d357ed895',
             'date': '2017-02-23T00:00:00Z'},
            {'note': '772cf6f91219db969e4aa28e4fd606b92316948545ad528fd34feb1b9b12a3ad',
             'date': '2017-02-23T00:00:00Z'}
        ]
    }, ar['admin']
def test_04_remove_wrong_history(self):
    """remove_history with an ISSN that matches no snapshot leaves the
    journal's history untouched."""
    # make a journal that is a continuation
    journal = models.Journal()
    bibjson = journal.bibjson()
    bibjson.title = "An example Journal"
    bibjson.add_identifier(bibjson.E_ISSN, "1234-5678")
    journal.snapshot(isreplacedby="9876-5432")
    bibjson.remove_identifiers(bibjson.E_ISSN)
    bibjson.add_identifier(bibjson.E_ISSN, "9876-5432")
    bibjson.title = "An updated journal"

    # delete the (wrong) continuation
    journal.remove_history("7564-0912")

    # check the history is unchanged
    assert len(journal.history()) == 1
def test_33_article_stats(self):
    """article_stats counts only in-DOAJ articles for the journal's ISSN and
    reports the latest created date among them."""
    # make a bunch of articles variably in doaj/not in doaj, for/not for the
    # issn we'll search for.  FIX: the four copy-pasted loops are replaced by
    # one data-driven loop — (index range, issn, in_doaj) per batch.
    batches = [
        (range(1, 3), "1111-1111", True),
        (range(3, 5), "1111-1111", False),
        (range(5, 7), "2222-2222", True),
        (range(7, 9), "2222-2222", False),
    ]
    articles = []
    for idx_range, issn, in_doaj in batches:
        for i in idx_range:
            article = models.Article(
                **ArticleFixtureFactory.make_article_source(eissn=issn, pissn=issn, with_id=False, in_doaj=in_doaj))
            article.set_created("2019-01-0" + str(i) + "T00:00:00Z")
            articles.append(article)

    # save all, blocking on the last so the index has caught up
    for i in range(len(articles)):
        articles[i].save(blocking=i == len(articles) - 1)

    journal = models.Journal()
    bj = journal.bibjson()
    bj.add_identifier(bj.P_ISSN, "1111-1111")

    stats = journal.article_stats()
    # two in-DOAJ articles carry 1111-1111, created 2019-01-01 and 2019-01-02
    assert stats.get("total") == 2
    assert stats.get("latest") == "2019-01-02T00:00:00Z"
def migrate(data):
    """Extract the embedded ``history`` entries of a journal record into
    standalone continuation Journal objects, then strip the history from the
    parent record and return it."""
    if "history" not in data:
        return data

    admin = data.get("admin", {})
    issns = _get_issns(data.get("bibjson", {}))
    replaces = None
    # created date defaults to the parent record's created date - will be
    # overridden later if possible
    created = data.get("created_date")

    for entry in data["history"]:
        obj = {"bibjson": entry.get("bibjson")}
        if "replaces" in entry:
            obj["bibjson"]["replaces"] = entry.get("replaces")
        if "isreplacedby" in entry:
            obj["bibjson"]["is_replaced_by"] = entry.get("isreplacedby")
            # if this history entry is replaced by the parent itself, the
            # parent in turn "replaces" the entry's issns
            for irb in entry["isreplacedby"]:
                if irb in issns:
                    replaces = _get_issns(obj["bibjson"])
        #if "date" in h:
        #    created = h["date"]

        cont = models.Journal(**obj)
        cont.set_in_doaj(admin.get("in_doaj", False))
        cont.set_ticked(admin.get("ticked", False))
        cont.set_seal(admin.get("seal", False))
        cont.set_owner(admin.get("owner"))
        cont.set_editor_group(admin.get("editor_group"))
        cont.set_editor(admin.get("editor"))
        for contact in admin.get("contact", []):
            cont.add_contact(contact.get("name"), contact.get("email"))

        # FIXME: note that the date on the history records appears to be the
        # date of import from the last version of DOAJ, so not much value in
        # it.  Defaulting to the parent's created date
        if created is not None:
            cont.set_created(created)
        cont.add_note(u"Continuation automatically extracted from journal {x} during migration".format(x=data.get("id")))
        cont.save()

    if replaces is not None:
        data["bibjson"]["replaces"] = replaces

    del data["history"]
    return data
def test_03_snapshot_journal(self):
    """Saving a journal writes a history snapshot file containing its bibjson."""
    # make ourselves an example journal
    journal = models.Journal()
    bibjson = journal.bibjson()
    bibjson.title = "Example journal"
    bibjson.add_url("http://examplejournal.telfor.rs")

    # the snapshot is part of the save method
    journal.save()

    # let the index catch up, then we can check this worked
    time.sleep(5)

    history_files = self.list_today_journal_history_files()
    assert len(history_files) == 1
    with open(history_files[0], 'r', encoding="utf-8") as fh:
        hist = json.load(fh)
    assert hist.get("bibjson", {}).get("title") == "Example journal"
def test_01_row(self):
    """Journal.csv() produces a 17-column row with the expected values."""
    journal = models.Journal()
    bib = journal.bibjson()
    bib.title = "My Title"
    bib.alternative_title = "Alt Title"
    bib.add_url("http://home.com", "homepage")
    bib.add_url("http://other.com", "other")
    bib.publisher = "Journal House"
    bib.add_language("en")
    bib.add_language("fr")
    bib.add_identifier(bib.P_ISSN, "1234-5678")
    bib.add_identifier(bib.E_ISSN, "9876-5432")
    bib.add_keyword("one")
    bib.add_keyword("two")
    bib.set_oa_start("2004")
    bib.set_oa_end("2007")
    bib.add_subject("LCC", "Medicine")
    bib.country = "GB"
    bib.set_license("CC BY", "CC BY")
    journal.set_in_doaj(True)
    journal.prep()

    row = journal.csv()
    assert len(row) == 17
    assert row[0] == "My Title"
    assert row[1] == "Alt Title"
    assert row[2] == "http://home.com", row[2]
    assert row[3] == "Journal House"
    # languages and keywords are unordered
    assert row[4] in ["en,fr", "fr,en"]
    assert row[5] == "1234-5678"
    assert row[6] == "9876-5432"
    assert row[7] in ["one,two", "two,one"]
    assert row[8] == "2004"
    assert row[9] == "2007"
    # assert row[10] is not None and row[10] != "", row[10]  # created_date, only set on save()
    assert row[11] == "Medicine"
    assert row[12] == "United Kingdom", row[12]
    assert row[13] == ""
    assert row[14] == ""
    assert row[15] == "BY"
    assert row[16] == "Yes"
def test_22_make_continuation_errors(self):
    """make_continuation raises ContinuationException for an unknown direction
    and when no ISSN is supplied."""
    journal = models.Journal()
    bj = journal.bibjson()
    bj.add_identifier(bj.E_ISSN, "0000-0000")
    bj.add_identifier(bj.P_ISSN, "1111-1111")
    bj.title = "First Journal"
    journal.save()
    time.sleep(2)

    # "sideways" is not a valid continuation direction
    # FIX: dropped the unused `cont = ...` assignments — the calls are
    # expected to raise, so their return values were never reachable
    with self.assertRaises(models.ContinuationException):
        journal.make_continuation("sideways", eissn="2222-2222", pissn="3333-3333", title="Second Journal")

    # a continuation needs at least one ISSN
    with self.assertRaises(models.ContinuationException):
        journal.make_continuation("replaces", title="Second Journal")
def test_05_retrieve_private_journal_success(self):
    """The journal owner can retrieve their own (private) journal via the CRUD API."""
    # set up all the bits we need
    data = JournalFixtureFactory.make_journal_source(include_obsolete_fields=True)
    journal = models.Journal(**data)
    journal.save()
    time.sleep(2)

    account = models.Account()
    account.set_id(journal.owner)
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    # call retrieve on the object
    result = JournalsCrudApi.retrieve(journal.id, account)

    # check that we got back the object we expected
    assert isinstance(result, OutgoingJournal)
    assert result.id == journal.id
def test_09_update_article_fail(self):
    """Article update fails without an account, with the wrong account, and
    with a nonexistent article id."""
    # set up all the bits we need
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)

    data = ArticleFixtureFactory.make_article_source()

    # call create on the object (which will save it to the index)
    a = ArticlesCrudApi.create(data, account)

    # let the index catch up
    time.sleep(1)

    # get a copy of the newly created version for use in assertions later
    created = models.Article.pull(a.id)

    # now make an updated version of the object
    data = ArticleFixtureFactory.make_article_source()
    data["bibjson"]["title"] = "An updated title"

    # call update on the object in various contexts that will fail

    # without an account
    with self.assertRaises(Api401Error):
        ArticlesCrudApi.update(a.id, data, None)

    # with the wrong account
    account.set_id("other")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.update(a.id, data, account)

    # on the wrong id
    account.set_id("test")
    with self.assertRaises(Api404Error):
        ArticlesCrudApi.update("adfasdfhwefwef", data, account)
def test_06_deep_paging_limit(self):
    """DISCOVERY_MAX_RECORDS_SIZE caps how deep paging may go: page 1 of 5
    works, page 2 of 5 (records 6-10) raises DiscoveryException whose message
    points at the bulk-access alternatives."""
    # populate the index with some journals
    jids = []
    for i in range(10):
        j = models.Journal()
        j.set_in_doaj(True)
        bj = j.bibjson()
        bj.title = "Test Journal {x}".format(x=i)
        bj.add_identifier(bj.P_ISSN, "{x}000-0000".format(x=i))
        bj.publisher = "Test Publisher {x}".format(x=i)
        bj.add_url("http://homepage.com/{x}".format(x=i), "homepage")
        j.save()
        jids.append((j.id, j.last_updated))

    self.app_test.config["DISCOVERY_MAX_RECORDS_SIZE"] = 5

    # block until all the records are saved
    for jid, lu in jids:
        models.Journal.block(jid, lu, sleep=0.05)

    # now run some queries
    with self.app_test.test_request_context():
        # check that the first page still works
        res = DiscoveryApi.search("journal", None, "*", 1, 5)
        assert res.data.get("total") == 10
        assert len(res.data.get("results")) == 5
        assert res.data.get("page") == 1
        assert res.data.get("pageSize") == 5

        # but the second page fails.  FIX: use the assertRaises context
        # manager's captured exception instead of the old manual
        # try/except/re-raise inside the context.
        with self.assertRaises(DiscoveryException) as ar:
            DiscoveryApi.search("journal", None, "*", 2, 5)

        # the error message should direct users to the bulk alternatives
        assert url_for("doaj.public_data_dump") in ar.exception.message
        assert url_for("oaipmh.oaipmh", specified="article") in ar.exception.message
        assert url_for("oaipmh.oaipmh") in ar.exception.message
def test_02_outgoing_journal_urls(self):
    """ We've relaxed the URL constraints for outgoing journals - https://github.com/DOAJ/doajPM/issues/2268 """
    data = JournalFixtureFactory.make_journal_source(include_obsolete_fields=True)

    # poison every URL field with something that would fail strict validation
    invalid_url = 'an invalid url $321 >>,'
    data['bibjson']['submission_charges_url'] = invalid_url
    data['bibjson']['editorial_review']['url'] = invalid_url
    data['bibjson']['plagiarism_detection']['url'] = invalid_url
    data['bibjson']['article_statistics']['url'] = invalid_url
    data['bibjson']['author_copyright']['url'] = invalid_url
    data['bibjson']['author_publishing_rights']['url'] = invalid_url
    for link in data['bibjson']['link']:
        link['url'] = invalid_url

    # Even with all of the dodgy URLS above, we should still have a successful
    # OutgoingJournal object.
    journal = models.Journal(**data)
    OutgoingJournal.from_model(journal)
def test_01_maned_review_emails(self):
    """ Ensure the Managing Editor's journal review form sends the right emails"""
    journal = models.Journal(**JOURNAL_SOURCE_TEST_1)

    # Construct an journal form
    fc = formcontext.JournalFormFactory.get_form_context(role="admin", source=journal)
    assert isinstance(fc, formcontext.ManEdJournalReview)

    # If we change the editor group or assigned editor, emails should be sent to editors
    fc.form.editor_group.data = "Test Editor Group"
    fc.form.editor.data = "associate_3"

    with self.app_test.test_request_context():
        fc.finalise()
    info_stream_contents = self.info_stream.getvalue()

    # check the associate was changed
    assert fc.target.editor == "associate_3"

    # We expect 2 emails to be sent:
    # * to the editor of the assigned group,
    # * to the AssEd who's been assigned,
    editor_template = re.escape('editor_journal_assigned_group.txt')
    editor_to = re.escape('*****@*****.**')
    editor_subject = 'new journal assigned to your group'
    editor_email_matched = re.search(
        email_log_regex % (editor_template, editor_to, editor_subject),
        info_stream_contents,
        re.DOTALL)
    assert bool(editor_email_matched)

    assEd_template = 'assoc_editor_journal_assigned.txt'
    assEd_to = re.escape(models.Account.pull('associate_3').email)
    assEd_subject = 'new journal assigned to you'
    assEd_email_matched = re.search(
        email_log_regex % (assEd_template, assEd_to, assEd_subject),
        info_stream_contents,
        re.DOTALL)
    assert bool(assEd_email_matched)

    assert len(re.findall(email_count_string, info_stream_contents)) == 2
def test_06_retrieve_article_success(self):
    """An in-DOAJ article can be retrieved both with and without an account,
    and the outgoing object carries the expected journal metadata."""
    # set up all the bits we need: add a journal to the account
    account = models.Account()
    account.set_id('test')
    account.set_name("Tester")
    account.set_email("*****@*****.**")

    journal = models.Journal(**JournalFixtureFactory.make_journal_source(in_doaj=True))
    journal.set_owner(account.id)
    journal.save()
    time.sleep(1)

    data = ArticleFixtureFactory.make_article_source()
    ap = models.Article(**data)
    ap.save()
    time.sleep(1)

    # call retrieve on the object with a valid user
    a = ArticlesCrudApi.retrieve(ap.id, account)

    # call retrieve with no user (will return if in_doaj is True)
    a = ArticlesCrudApi.retrieve(ap.id, None)

    # check that we got back the object we expected
    assert isinstance(a, OutgoingArticleDO)
    assert a.id == ap.id
    assert a.bibjson.journal.start_page == '3', a.bibjson.journal.start_page
    assert a.bibjson.journal.end_page == '21'
    assert a.bibjson.journal.volume == '1'
    assert a.bibjson.journal.number == '99'
    assert a.bibjson.journal.publisher == 'The Publisher', a.bibjson().publisher
    assert a.bibjson.journal.title == 'The Title'
    license = a.bibjson.journal.license[0]
    assert license.title == "CC BY"
    assert license.type == "CC BY"
    assert license.url == "http://license.example.com"
    assert license.version == "1.0"
    assert license.open_access == True
    assert a.bibjson.journal.language == ["EN", "FR"]
    assert a.bibjson.journal.country == "US"
def setUp(self):
    """Build a set of journals, each with articles, and push a managing-editor
    context for the bulk-delete task tests."""
    super(TestTaskArticleBulkDelete, self).setUp()

    ArticleBulkDeleteBackgroundTask.BATCH_SIZE = 13

    self.journals = []
    self.articles = []
    for source in JournalFixtureFactory.make_many_journal_sources(count=TEST_JOURNAL_COUNT):
        journal = models.Journal(**source)
        self.journals.append(journal)
        journal.save()
        # articles inherit the journal's ISSNs
        for _ in range(TEST_ARTICLES_PER_JOURNAL):
            article = models.Article(**ArticleFixtureFactory.make_article_source(
                with_id=False,
                eissn=journal.bibjson().first_eissn,
                pissn=journal.bibjson().first_pissn))
            article.save()
            self.articles.append(article)

    # let the index catch up
    sleep(2)

    self._make_and_push_test_context(acc=models.Account(**AccountFixtureFactory.make_managing_editor_source()))
def test_05_maned_review_continuations(self):
    """Continuation fields (replaces / is_replaced_by / discontinued_date)
    survive the round trip from form data through the crosswalk to the target."""
    # construct it from form data (with a known source)
    fc = formcontext.JournalFormFactory.get_form_context(
        role="admin",
        form_data=MultiDict(JOURNAL_FORM),
        source=models.Journal(**JOURNAL_SOURCE))

    # check the form has the continuations data
    assert fc.form.replaces.data == ["1111-1111"]
    assert fc.form.is_replaced_by.data == ["2222-2222"]
    assert fc.form.discontinued_date.data == "2001-01-01"

    # run the crosswalk, don't test it at all in this test
    fc.form2target()
    # patch the target with data from the source
    fc.patch_target()

    # ensure the model has the continuations data
    assert fc.target.bibjson().replaces == ["1111-1111"]
    assert fc.target.bibjson().is_replaced_by == ["2222-2222"]
    assert fc.target.bibjson().discontinued_date == "2001-01-01"