def test_14_process_enhancements_exception(self):
    # What happens when processing an enhancement fails
    sources = EnhancementFixtureFactory.request_per_day("2001-01", 9)
    dois = ["10.1234/first", "10.1234/second", "10.1234/third"]

    # we're going to construct a series of enhancements for each doi
    for i in range(len(sources)):
        s = sources[i]
        doi_idx = i % 3  # iterate over the dois 3 times
        doi = dois[doi_idx]
        s["record"]["dc:identifier"] = [{"type": "doi", "id": doi}]
        en = Enhancement(s)
        en.save()

    time.sleep(2)

    # set up the mock
    PublicApi.publish = publish_mock

    # now run the process job back to the first day
    with self.assertRaises(TestException):
        WorkflowApi.process_enhancements()

    time.sleep(2)

    # we know this died while the 6th enhancement was being processed,
    # so just check that the workflow state reflects that
    wfs_dao = WorkflowState()
    wfs = wfs_dao.pull("enhancements")
    assert wfs.last_request == "2001-01-05T00:00:00Z"
    assert len(wfs.already_processed) == 1
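# NOTE: publish_mock, delete_mock and TestException are test helpers that the exception
# tests in this module swap in over the real PublicApi methods; they are defined elsewhere
# in the test suite. A minimal sketch, assuming the mocks receive the Request/Enhancement
# object being processed and are meant to fail once the run reaches the record dated
# 2001-01-06 (so the workflow state is left pointing at 2001-01-05), might look like this;
# the real helpers may differ.

class TestException(Exception):
    pass

def publish_mock(obj, *args, **kwargs):
    # fail as soon as we reach the record created on the 6th day of the month
    if obj.created_date >= "2001-01-06T00:00:00Z":
        raise TestException("forced failure for testing")

def delete_mock(obj, *args, **kwargs):
    # deletes are given the same forced-failure behaviour
    if obj.created_date >= "2001-01-06T00:00:00Z":
        raise TestException("forced failure for testing")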
def process_updates():
    """
    Process all new Requests and all new Enhancements
    """
    print("[{x}] Processing Requests".format(x=dates.now()))
    WorkflowApi.process_requests()

    print("[{x}] Processing Enhancements".format(x=dates.now()))
    WorkflowApi.process_enhancements()
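# process_updates simply chains the two workflow jobs that the surrounding tests exercise
# individually; in deployment it would typically be run on a schedule. A minimal sketch of
# a command-line entry point, assuming the module can be executed directly (the real
# project may drive this from cron or its own scheduling machinery), might be:

if __name__ == "__main__":
    process_updates()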
def test_12_process_requests_exception(self):
    # What happens when the process_requests method fails for a variety of reasons
    sources = RequestFixtureFactory.request_per_day("2001-01", 9)
    dois = ["10.1234/first", "10.1234/second", "10.1234/third"]

    # we're going to construct a series of requests for each doi
    # starting with a create, then an update, followed by a delete
    # (not that it matters, as we're going to pump them through a mock)
    for i in range(len(sources)):
        s = sources[i]
        doi_idx = i % 3  # iterate over the dois 3 times
        doi = dois[doi_idx]
        s["record"]["dc:identifier"] = [{"type": "doi", "id": doi}]
        if i < 3:
            s["record"]["dc:title"] = "Create"
            req = Request(s)
            req.action = "update"
            req.save()
        elif i < 6:
            s["record"]["dc:title"] = "Update"
            req = Request(s)
            req.action = "update"
            req.save()
        else:
            s["record"]["dc:title"] = "Delete"
            req = Request(s)
            req.action = "delete"
            req.save()

    time.sleep(2)

    # set up the mocks
    PublicApi.publish = publish_mock
    PublicApi.remove = delete_mock

    # now run the process job back to the first day
    with self.assertRaises(TestException):
        WorkflowApi.process_requests()

    # we know this died during the 6th update request being processed,
    # so just check that the workflow state reflects that
    wfs_dao = WorkflowState()
    wfs = wfs_dao.pull("requests")
    assert wfs.last_request == "2001-01-05T00:00:00Z"
    assert len(wfs.already_processed) == 1
def test_13_process_enhancements_cycle(self):
    # Run through the process of processing an enhancement
    source = EnhancementFixtureFactory.example()
    if "id" in source:
        del source["id"]

    pub_dao = PublicAPC()
    wfs_dao = WorkflowState()

    # first make a public record for us to enhance
    first = PublicAPCFixtureFactory.example()
    del first["record"]["dc:title"]
    pub = PublicAPC(first)
    pub.save(blocking=True)

    # now create an enhancement on the record
    second = deepcopy(source)
    second["record"]["dc:title"] = "Update"
    second["created_date"] = "2002-01-01T00:00:00Z"
    en = Enhancement(second)
    en.public_id = pub.id
    en.save(blocking=True)

    # run the job
    WorkflowApi.process_enhancements()

    time.sleep(2)

    # check that the workflow state was created
    wfs = wfs_dao.pull("enhancements")
    assert wfs is not None
    assert wfs.last_request == en.created_date
    assert wfs.already_processed == [en.id]

    # check the public record was updated
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 1
    assert pubs[0].record.get("dc:title") == "Update"

    # now run an update with the same date, to observe the difference in the workflow state
    third = deepcopy(source)
    third["record"]["dc:title"] = "Update 2"
    third["created_date"] = "2002-01-01T00:00:00Z"
    en2 = Enhancement(third)
    en2.public_id = pub.id
    en2.save(blocking=True)

    # run the job again
    WorkflowApi.process_enhancements()

    time.sleep(2)

    # check the public record
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 1
    assert pubs[0].record.get("dc:title") == "Update"  # should not have been updated, since data was already present

    # check that the workflow state was updated
    wfs = wfs_dao.pull("enhancements")
    assert wfs is not None
    assert wfs.last_request == en2.created_date
    assert wfs.already_processed == [en.id, en2.id]  # processed records should have been appended
def test_11_process_requests_cycle(self):
    # Run through the process of processing a Request into a PublicAPC
    source = RequestFixtureFactory.example()
    if "id" in source:
        del source["id"]

    pub_dao = PublicAPC()
    wfs_dao = WorkflowState()

    # first make a record for the first time
    first = deepcopy(source)
    del first["record"]["dc:title"]
    req = Request(first)
    req.owner = "test"
    req.action = "update"
    req.save(blocking=True)

    # run the job
    WorkflowApi.process_requests()

    time.sleep(2)

    # first check that a public record was made
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 1
    assert pubs[0].record.get("dc:title") is None

    # check that the workflow state was created
    wfs = wfs_dao.pull("requests")
    assert wfs is not None
    assert wfs.last_request == req.created_date
    assert wfs.already_processed == [req.id]

    # now run an update with a different date
    second = deepcopy(source)
    second["record"]["dc:title"] = "Update"
    second["created_date"] = "2002-01-01T00:00:00Z"
    req2 = Request(second)
    req2.owner = "test"
    req2.action = "update"
    req2.save(blocking=True)

    # run the job again
    WorkflowApi.process_requests()

    time.sleep(2)

    # check the public record was updated
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 1
    assert pubs[0].record.get("dc:title") == "Update"

    # check that the workflow state was updated
    wfs = wfs_dao.pull("requests")
    assert wfs is not None
    assert wfs.last_request == req2.created_date
    assert wfs.already_processed == [req2.id]

    # now run an update with the same date, to observe the difference in the workflow state
    third = deepcopy(source)
    third["record"]["dc:title"] = "Update 2"
    third["created_date"] = "2002-01-01T00:00:00Z"
    req3 = Request(third)
    req3.owner = "test"
    req3.action = "update"
    req3.save(blocking=True)

    # run the job again
    WorkflowApi.process_requests()

    time.sleep(2)

    # check the public record was updated
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 1
    assert pubs[0].record.get("dc:title") == "Update 2"  # should have been updated, as there are only apc contributions from one source

    # check that the workflow state was updated
    wfs = wfs_dao.pull("requests")
    assert wfs is not None
    assert wfs.last_request == req3.created_date
    assert wfs.already_processed == [req2.id, req3.id]  # processed records should have been appended

    # finally issue a delete request
    fourth = deepcopy(source)
    fourth["created_date"] = "2003-01-01T00:00:00Z"
    req4 = Request(fourth)
    req4.owner = "test"
    req4.action = "delete"
    req4.save(blocking=True)

    # run the job again
    WorkflowApi.process_requests()

    time.sleep(2)

    # check the public record was removed
    pubs = pub_dao.find_by_doi("10.1234/me")
    assert len(pubs) == 0

    # check that the workflow state was updated
    wfs = wfs_dao.pull("requests")
    assert wfs is not None
    assert wfs.last_request == req4.created_date
    assert wfs.already_processed == [req4.id]  # processed list should have been reset, since the request date moved on
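# The cycle tests above rely on WorkflowState keeping a high-water mark (last_request) plus
# the ids already seen at exactly that timestamp (already_processed): ids accumulate while
# the timestamp repeats, and the list resets once it advances. A minimal sketch of that
# bookkeeping, using a hypothetical helper name (the real workflow code may structure this
# differently), could be:

def record_processed(wfs, obj):
    # advance the high-water mark, or accumulate ids that share it;
    # records at or below the mark that are already listed are skipped by the caller
    if obj.created_date > wfs.last_request:
        wfs.last_request = obj.created_date
        wfs.already_processed = [obj.id]
    elif obj.created_date == wfs.last_request:
        wfs.already_processed.append(obj.id)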