class TestURNLookupController(DatabaseTest):

    def setup(self):
        super(TestURNLookupController, self).setup()
        self.controller = URNLookupController(self._db)
        self.source = DataSource.lookup(self._db, DataSource.INTERNAL_PROCESSING)

    def assert_one_message(self, urn, code, message):
        """Assert that the given message is the only thing in the feed."""
        [obj] = self.controller.precomposed_entries
        expect = OPDSMessage(urn, code, message)
        assert isinstance(obj, OPDSMessage)
        eq_(urn, obj.urn)
        eq_(code, obj.status_code)
        eq_(message, obj.message)
        eq_([], self.controller.works)

    def test_process_urn_initial_registration(self):
        urn = Identifier.URN_SCHEME_PREFIX + "Overdrive ID/nosuchidentifier"
        self.controller.process_urn(urn)
        self.assert_one_message(
            urn, 201, URNLookupController.IDENTIFIER_REGISTERED
        )

        # The Identifier has been created and given a CoverageRecord
        # with a transient failure.
        [identifier] = self._db.query(Identifier).filter(
            Identifier.type==Identifier.OVERDRIVE_ID
        ).all()
        eq_("nosuchidentifier", identifier.identifier)
        [coverage] = identifier.coverage_records
        eq_(CoverageRecord.TRANSIENT_FAILURE, coverage.status)

    def test_process_urn_pending_resolve_attempt(self):
        # Simulate calling process_urn twice, and make sure the second
        # call results in an "I'm working on it, hold your horses" message.
        identifier = self._identifier(Identifier.GUTENBERG_ID)
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.TRANSIENT_FAILURE
        )
        record.exception = self.controller.NO_WORK_DONE_EXCEPTION

        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_ACCEPTED,
            URNLookupController.WORKING_TO_RESOLVE_IDENTIFIER
        )

    def test_process_urn_exception_during_resolve_attempt(self):
        identifier = self._identifier(Identifier.GUTENBERG_ID)
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.PERSISTENT_FAILURE
        )
        record.exception = "foo"
        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_INTERNAL_SERVER_ERROR, "foo"
        )

    def test_process_urn_no_presentation_ready_work(self):
        identifier = self._identifier(Identifier.GUTENBERG_ID)

        # There's a record of success, but no presentation-ready work.
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.SUCCESS
        )

        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_INTERNAL_SERVER_ERROR,
            self.controller.SUCCESS_DID_NOT_RESULT_IN_PRESENTATION_READY_WORK
        )

    def test_process_urn_unresolvable_type(self):
        # We can't resolve a 3M identifier because we don't have the
        # appropriate access to the bibliographic API.
        identifier = self._identifier(Identifier.THREEM_ID)
        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_NOT_FOUND,
            self.controller.UNRESOLVABLE_IDENTIFIER
        )

    def test_presentation_ready_work_overrides_unresolveable_type(self):
        # If there is a presentation-ready Work associated with the
        # identifier, turns out we can resolve it even if the type
        # would otherwise not be resolvable.
        edition, pool = self._edition(
            identifier_type=Identifier.THREEM_ID, with_license_pool=True
        )
        pool.open_access = False
        work, is_new = pool.calculate_work()
        work.presentation_ready = True
        identifier = edition.primary_identifier
        self.controller.process_urn(identifier.urn)
        eq_([(identifier, work)], self.controller.works)

    def test_process_urn_with_collection(self):
        collection = self._collection()
        i1 = self._identifier()
        i2 = self._identifier()
        eq_([], collection.catalog)

        self.controller.process_urn(i1.urn, collection=collection)
        eq_(1, len(collection.catalog))
        eq_([i1], collection.catalog)

        # Adds new identifiers to an existing catalog.
        self.controller.process_urn(i2.urn, collection=collection)
        eq_(2, len(collection.catalog))
        assert i1 in collection.catalog
        assert i2 in collection.catalog

        # Does not duplicate identifiers in the catalog.
        self.controller.process_urn(i1.urn, collection=collection)
        eq_(2, len(collection.catalog))
        assert i1 in collection.catalog
        assert i2 in collection.catalog

    def test_process_urn_isbn(self):
        # Create a new ISBN identifier.
        # Ask online providers for metadata to turn into an OPDS feed
        # about this identifier.
        # Make sure a coverage record was created, and a 201 status was
        # obtained from the provider.
        # Ask the online provider again, and make sure we're now getting
        # a 202 "working on it" status.
        # Ask again, this time getting a result. Make sure we know that
        # we got a result.
        isbn, ignore = Identifier.for_foreign_id(
            self._db, Identifier.ISBN, self._isbn
        )

        # The first time we look up an ISBN, a CoverageRecord is created
        # representing the work to be done.
        self.controller.process_urn(isbn.urn)
        self.assert_one_message(
            isbn.urn, HTTP_CREATED, self.controller.IDENTIFIER_REGISTERED
        )
        [record] = isbn.coverage_records
        eq_(record.exception, self.controller.NO_WORK_DONE_EXCEPTION)
        eq_(record.status, CoverageRecord.TRANSIENT_FAILURE)

        # So long as the necessary coverage is not provided, future
        # lookups will not provide useful information.
        self.controller.precomposed_entries = []
        self.controller.process_urn(isbn.urn)
        self.assert_one_message(
            isbn.urn, HTTP_ACCEPTED, self.controller.WORKING_TO_RESOLVE_IDENTIFIER
        )

        # Let's provide the coverage.
        metadata_sources = DataSource.metadata_sources_for(self._db, isbn)
        for source in metadata_sources:
            CoverageRecord.add_for(isbn, source)

        # Process the ISBN again, and we get an <entry> tag with the
        # information.
        self.controller.precomposed_entries = []
        self.controller.process_urn(isbn.urn)
        expect = isbn.opds_entry()
        [actual] = self.controller.precomposed_entries
        eq_(etree.tostring(expect), etree.tostring(actual))
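Taken together, the tests above walk process_urn() through a small status lifecycle. The following is an illustrative summary only, not part of the test suite; it restates the coverage state each test sets up and the status code carried by the OPDSMessage the test expects back.

# Illustrative summary (not part of the test suite): the state of the
# identifier going into process_urn(), and the status code of the
# OPDSMessage the tests above expect to come out.
PROCESS_URN_EXPECTATIONS = [
    # (state before lookup,                           expected status code)
    ("identifier not yet registered",                 201),  # IDENTIFIER_REGISTERED
    ("transient failure / NO_WORK_DONE_EXCEPTION",    202),  # WORKING_TO_RESOLVE_IDENTIFIER
    ("persistent failure with a recorded exception",  500),  # the exception message itself
    ("success, but no presentation-ready Work",       500),  # SUCCESS_DID_NOT_RESULT_IN_PRESENTATION_READY_WORK
    ("identifier type we cannot resolve (3M)",        404),  # UNRESOLVABLE_IDENTIFIER
]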
def lookup(collection_metadata_identifier=None):
    return URNLookupController(app._db).work_lookup(
        VerboseAnnotator, require_active_licensepool=False,
        collection_details=collection_metadata_identifier
    )
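For context, a minimal sketch of how a handler like lookup() might be registered with the Flask app. The route paths, the Flask app object, and the import locations of URNLookupController and VerboseAnnotator are assumptions made for illustration; only the controller call itself comes from the handler above.

# A minimal sketch, not the application's actual routing code. The
# '/lookup' and '/<collection_metadata_identifier>/lookup' paths are
# assumptions, and URNLookupController / VerboseAnnotator are assumed to
# be importable from the surrounding project.
from flask import Flask

app = Flask(__name__)

@app.route('/lookup')
@app.route('/<collection_metadata_identifier>/lookup')
def lookup(collection_metadata_identifier=None):
    # Delegate to the controller exactly as the handler above does.
    return URNLookupController(app._db).work_lookup(
        VerboseAnnotator, require_active_licensepool=False,
        collection_details=collection_metadata_identifier,
    )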
class TestURNLookupController(ControllerTest):

    def setup(self):
        super(TestURNLookupController, self).setup()
        self.controller = URNLookupController(self._db)
        self.source = DataSource.lookup(self._db, DataSource.INTERNAL_PROCESSING)

    def basic_request_context(f):
        # Decorator for test methods that need a Flask request context
        # but no special headers.
        @wraps(f)
        def decorated(*args, **kwargs):
            from app import app
            with app.test_request_context('/'):
                return f(*args, **kwargs)
        return decorated

    @basic_request_context
    def assert_one_message(self, urn, code, message):
        """Assert that the given message is the only thing in the feed."""
        [obj] = self.controller.precomposed_entries
        expect = OPDSMessage(urn, code, message)
        assert isinstance(obj, OPDSMessage)
        eq_(urn, obj.urn)
        eq_(code, obj.status_code)
        eq_(message, obj.message)
        eq_([], self.controller.works)

    @basic_request_context
    def test_process_urn_initial_registration(self):
        urn = Identifier.URN_SCHEME_PREFIX + "Overdrive ID/nosuchidentifier"
        self.controller.process_urn(urn)
        self.assert_one_message(
            urn, 201, URNLookupController.IDENTIFIER_REGISTERED
        )

        # The Identifier has been created and given a CoverageRecord
        # with a transient failure.
        [identifier] = self._db.query(Identifier).filter(
            Identifier.type == Identifier.OVERDRIVE_ID
        ).all()
        eq_("nosuchidentifier", identifier.identifier)
        [coverage] = identifier.coverage_records
        eq_(CoverageRecord.TRANSIENT_FAILURE, coverage.status)

    @basic_request_context
    def test_process_urn_pending_resolve_attempt(self):
        # Simulate calling process_urn twice, and make sure the second
        # call results in an "I'm working on it, hold your horses" message.
        identifier = self._identifier(Identifier.GUTENBERG_ID)
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.TRANSIENT_FAILURE
        )
        record.exception = self.controller.NO_WORK_DONE_EXCEPTION

        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_ACCEPTED,
            URNLookupController.WORKING_TO_RESOLVE_IDENTIFIER
        )

    @basic_request_context
    def test_process_urn_exception_during_resolve_attempt(self):
        identifier = self._identifier(Identifier.GUTENBERG_ID)
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.PERSISTENT_FAILURE
        )
        record.exception = "foo"
        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_INTERNAL_SERVER_ERROR, "foo"
        )

    @basic_request_context
    def test_process_urn_no_presentation_ready_work(self):
        identifier = self._identifier(Identifier.GUTENBERG_ID)

        # There's a record of success, but no presentation-ready work.
        record, is_new = CoverageRecord.add_for(
            identifier, self.source, self.controller.OPERATION,
            status=CoverageRecord.SUCCESS
        )

        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_INTERNAL_SERVER_ERROR,
            self.controller.SUCCESS_DID_NOT_RESULT_IN_PRESENTATION_READY_WORK
        )

    @basic_request_context
    def test_process_urn_unresolvable_type(self):
        # We can't resolve a 3M identifier because we don't have the
        # appropriate access to the bibliographic API.
        identifier = self._identifier(Identifier.THREEM_ID)
        self.controller.process_urn(identifier.urn)
        self.assert_one_message(
            identifier.urn, HTTP_NOT_FOUND,
            self.controller.UNRESOLVABLE_IDENTIFIER
        )

    @basic_request_context
    def test_presentation_ready_work_overrides_unresolveable_type(self):
        # If there is a presentation-ready Work associated with the
        # identifier, turns out we can resolve it even if the type
        # would otherwise not be resolvable.
        edition, pool = self._edition(
            identifier_type=Identifier.THREEM_ID, with_license_pool=True
        )
        pool.open_access = False
        work, is_new = pool.calculate_work()
        work.presentation_ready = True
        identifier = edition.primary_identifier
        self.controller.process_urn(identifier.urn)
        eq_([(identifier, work)], self.controller.works)

    def test_process_urn_with_collection(self):
        name = base64.b64encode(
            (ExternalIntegration.OPDS_IMPORT + ':' + self._url), '-_'
        )
        collection = self._collection(name=name, url=self._url)

        with self.app.test_request_context('/', headers=self.valid_auth):
            i1 = self._identifier()
            i2 = self._identifier()
            eq_([], collection.catalog)

            self.controller.process_urn(i1.urn, collection_details=name)
            eq_(1, len(collection.catalog))
            eq_([i1], collection.catalog)

            # Adds new identifiers to an existing collection's catalog.
            self.controller.process_urn(i2.urn, collection_details=name)
            eq_(2, len(collection.catalog))
            eq_(sorted([i1, i2]), sorted(collection.catalog))

            # Does not duplicate identifiers in the collection's catalog.
            self.controller.process_urn(i1.urn, collection_details=name)
            eq_(2, len(collection.catalog))
            eq_(sorted([i1, i2]), sorted(collection.catalog))

        with self.app.test_request_context('/'):
            # Does not add identifiers to a collection if the request
            # isn't sent by an authenticated client, even if there's a
            # collection attached.
            i3 = self._identifier()
            self.controller.process_urn(i3.urn, collection_details=name)
            assert i3 not in collection.catalog

    @basic_request_context
    def test_process_urn_isbn(self):
        # Create a new ISBN identifier.
        # Ask online providers for metadata to turn into an OPDS feed
        # about this identifier.
        # Make sure a coverage record was created, and a 201 status was
        # obtained from the provider.
        # Ask the online provider again, and make sure we're now getting
        # a 202 "working on it" status.
        # Ask again, this time getting a result. Make sure we know that
        # we got a result.
        isbn, ignore = Identifier.for_foreign_id(
            self._db, Identifier.ISBN, self._isbn
        )

        # The first time we look up an ISBN, a CoverageRecord is created
        # representing the work to be done.
        self.controller.process_urn(isbn.urn)
        self.assert_one_message(
            isbn.urn, HTTP_CREATED, self.controller.IDENTIFIER_REGISTERED
        )
        [record] = isbn.coverage_records
        eq_(record.exception, self.controller.NO_WORK_DONE_EXCEPTION)
        eq_(record.status, CoverageRecord.TRANSIENT_FAILURE)

        # So long as the necessary coverage is not provided, future
        # lookups will not provide useful information.
        self.controller.precomposed_entries = []
        self.controller.process_urn(isbn.urn)
        self.assert_one_message(
            isbn.urn, HTTP_ACCEPTED, self.controller.WORKING_TO_RESOLVE_IDENTIFIER
        )

        # Let's provide the coverage.
        metadata_sources = DataSource.metadata_sources_for(self._db, isbn)
        for source in metadata_sources:
            CoverageRecord.add_for(isbn, source)

        # Process the ISBN again, and we get an <entry> tag with the
        # information.
        self.controller.precomposed_entries = []
        self.controller.process_urn(isbn.urn)
        expect = isbn.opds_entry()
        [actual] = self.controller.precomposed_entries
        eq_(etree.tostring(expect), etree.tostring(actual))