def test_import_audiobook(self, importer, mock_get, datasource, db):
    """Ensure that ODL2Importer correctly processes and imports a feed with an audiobook."""
    license = self.get_data("license-audiobook.json")
    feed = self.get_data("feed-audiobook.json")
    mock_get.add(license)

    configuration_storage = ConfigurationStorage(importer)
    configuration_factory = ConfigurationFactory()

    with configuration_factory.create(
        configuration_storage, db, ODL2APIConfiguration
    ) as configuration:
        configuration.skipped_license_formats = json.dumps(["text/html"])

    imported_editions, pools, works, failures = importer.import_from_feed(feed)

    # Make sure we imported one edition and it is an audiobook
    assert isinstance(imported_editions, list)
    assert 1 == len(imported_editions)

    [edition] = imported_editions
    assert isinstance(edition, Edition)
    assert edition.primary_identifier.identifier == "9780792766919"
    assert edition.primary_identifier.type == "ISBN"
    assert EditionConstants.AUDIO_MEDIUM == edition.medium

    # Make sure that license pools have correct configuration
    assert isinstance(pools, list)
    assert 1 == len(pools)

    [license_pool] = pools
    assert not license_pool.open_access
    assert 1 == license_pool.licenses_owned
    assert 1 == license_pool.licenses_available
    assert 2 == len(license_pool.delivery_mechanisms)

    lcp_delivery_mechanism = (
        self._get_delivery_mechanism_by_drm_scheme_and_content_type(
            license_pool.delivery_mechanisms,
            MediaTypes.AUDIOBOOK_PACKAGE_LCP_MEDIA_TYPE,
            DeliveryMechanism.LCP_DRM,
        )
    )
    assert lcp_delivery_mechanism is not None

    feedbooks_delivery_mechanism = (
        self._get_delivery_mechanism_by_drm_scheme_and_content_type(
            license_pool.delivery_mechanisms,
            MediaTypes.AUDIOBOOK_MANIFEST_MEDIA_TYPE,
            DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM,
        )
    )
    assert feedbooks_delivery_mechanism is not None
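# The assertions above rely on a test helper that is not shown in this excerpt.
# A minimal sketch of what such a helper might look like, assuming each entry in
# `delivery_mechanisms` is a LicensePoolDeliveryMechanism exposing a
# `delivery_mechanism` with `content_type` and `drm_scheme` attributes
# (names inferred, not confirmed by this excerpt):
def _get_delivery_mechanism_by_drm_scheme_and_content_type(
    delivery_mechanisms, content_type, drm_scheme
):
    """Return the first delivery mechanism matching the given content type
    and DRM scheme, or None if there is no match."""
    for mechanism in delivery_mechanisms:
        delivery_mechanism = mechanism.delivery_mechanism

        if (
            delivery_mechanism.content_type == content_type
            and delivery_mechanism.drm_scheme == drm_scheme
        ):
            return mechanism

    return None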
def test_local_lcpencrypt(
    self,
    _,
    file_path,
    lcpencrypt_output,
    expected_result,
    expected_exception=None,
    create_file=True,
):
    # Arrange
    integration_owner = create_autospec(spec=HasExternalIntegration)
    integration_owner.external_integration = MagicMock(
        return_value=self._integration
    )
    configuration_storage = ConfigurationStorage(integration_owner)
    configuration_factory = ConfigurationFactory()
    encryptor = LCPEncryptor(configuration_storage, configuration_factory)
    identifier = Identifier(identifier=fixtures.BOOK_IDENTIFIER)

    with configuration_factory.create(
        configuration_storage, self._db, LCPEncryptionConfiguration
    ) as configuration:
        configuration.lcpencrypt_location = (
            LCPEncryptionConfiguration.DEFAULT_LCPENCRYPT_LOCATION
        )

        with Patcher() as patcher:
            patcher.fs.create_file(
                LCPEncryptionConfiguration.DEFAULT_LCPENCRYPT_LOCATION
            )

            if create_file:
                patcher.fs.create_file(file_path)

            with patch("subprocess.check_output") as subprocess_check_output_mock:
                subprocess_check_output_mock.return_value = lcpencrypt_output

                if expected_exception:
                    with pytest.raises(
                        expected_exception.__class__
                    ) as exception_metadata:
                        encryptor.encrypt(self._db, file_path, identifier.identifier)

                    # Assert
                    assert exception_metadata.value == expected_exception
                else:
                    # Act
                    result = encryptor.encrypt(
                        self._db, file_path, identifier.identifier
                    )

                    # Assert
                    assert result == expected_result
class TestLCPAPI(DatabaseTest):
    def setup_method(self, mock_search=True):
        super(TestLCPAPI, self).setup_method()

        self._lcp_collection = self._collection(protocol=ExternalIntegration.LCP)
        self._integration = self._lcp_collection.external_integration

        integration_association = create_autospec(spec=HasExternalIntegration)
        integration_association.external_integration = MagicMock(
            return_value=self._integration
        )

        self._configuration_storage = ConfigurationStorage(integration_association)
        self._configuration_factory = ConfigurationFactory()

    def test_settings(self):
        # Assert
        assert len(LCPAPI.SETTINGS) == 12

        # Each entry is (index in LCPAPI.SETTINGS, configuration metadata,
        # expected control type).
        expected_settings = [
            (0, LCPServerConfiguration.lcpserver_url, None),
            (1, LCPServerConfiguration.lcpserver_user, None),
            (2, LCPServerConfiguration.lcpserver_password, None),
            (3, LCPServerConfiguration.lcpserver_input_directory, None),
            (4, LCPServerConfiguration.lcpserver_page_size, "number"),
            (5, LCPServerConfiguration.provider_name, None),
            (6, LCPServerConfiguration.passphrase_hint, None),
            (
                7,
                LCPServerConfiguration.encryption_algorithm,
                LCPServerConfiguration.encryption_algorithm.type.value,
            ),
            (8, LCPServerConfiguration.max_printable_pages, "number"),
            (9, LCPServerConfiguration.max_copiable_pages, "number"),
            (10, LCPEncryptionConfiguration.lcpencrypt_location, None),
            (11, LCPEncryptionConfiguration.lcpencrypt_output_directory, None),
        ]

        for index, metadata, expected_type in expected_settings:
            setting = LCPAPI.SETTINGS[index]

            assert setting[ConfigurationAttribute.KEY.value] == metadata.key
            assert setting[ConfigurationAttribute.LABEL.value] == metadata.label
            assert (
                setting[ConfigurationAttribute.DESCRIPTION.value]
                == metadata.description
            )
            assert setting[ConfigurationAttribute.TYPE.value] == expected_type
            assert setting[ConfigurationAttribute.REQUIRED.value] == metadata.required
            assert setting[ConfigurationAttribute.DEFAULT.value] == metadata.default
            assert setting[ConfigurationAttribute.CATEGORY.value] == metadata.category

    @freeze_time("2020-01-01 00:00:00")
    def test_checkout_without_existing_loan(self):
        # Arrange
        lcp_api = LCPAPI(self._db, self._lcp_collection)
        patron = self._patron()
        days = self._lcp_collection.default_loan_period(patron.library)
        start_date = utc_now()
        end_date = start_date + datetime.timedelta(days=days)

        data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
        data_source_name = data_source.name
        edition = self._edition(
            data_source_name=data_source_name, identifier_id=fixtures.CONTENT_ID
        )
        license_pool = self._licensepool(
            edition=edition,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )

        lcp_license = json.loads(fixtures.LCPSERVER_LICENSE)
        lcp_server_mock = create_autospec(spec=LCPServer)
        lcp_server_mock.generate_license = MagicMock(return_value=lcp_license)

        with self._configuration_factory.create(
            self._configuration_storage, self._db, LCPServerConfiguration
        ) as configuration:
            with patch("api.lcp.collection.LCPServer") as lcp_server_constructor:
                lcp_server_constructor.return_value = lcp_server_mock

                configuration.lcpserver_url = fixtures.LCPSERVER_URL
                configuration.lcpserver_user = fixtures.LCPSERVER_USER
                configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
                configuration.lcpserver_input_directory = (
                    fixtures.LCPSERVER_INPUT_DIRECTORY
                )
                configuration.provider_name = fixtures.PROVIDER_NAME
                configuration.passphrase_hint = fixtures.TEXT_HINT
                configuration.encryption_algorithm = (
                    LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
                )

                # Act
                loan = lcp_api.checkout(patron, "pin", license_pool, "internal format")

                # Assert
                assert loan.collection_id == self._lcp_collection.id
                assert loan.collection(self._db) == self._lcp_collection
                assert loan.license_pool(self._db) == license_pool
                assert loan.data_source_name == data_source_name
                assert loan.identifier_type == license_pool.identifier.type
                assert loan.external_identifier == lcp_license["id"]
                assert loan.start_date == start_date
                assert loan.end_date == end_date

                lcp_server_mock.generate_license.assert_called_once_with(
                    self._db, fixtures.CONTENT_ID, patron, start_date, end_date
                )

    @freeze_time("2020-01-01 00:00:00")
    def test_checkout_with_existing_loan(self):
        # Arrange
        lcp_api = LCPAPI(self._db, self._lcp_collection)
        patron = self._patron()
        days = self._lcp_collection.default_loan_period(patron.library)
        start_date = utc_now()
        end_date = start_date + datetime.timedelta(days=days)

        data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
        data_source_name = data_source.name
        edition = self._edition(
            data_source_name=data_source_name, identifier_id=fixtures.CONTENT_ID
        )
        license_pool = self._licensepool(
            edition=edition,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )

        lcp_license = json.loads(fixtures.LCPSERVER_LICENSE)
        lcp_server_mock = create_autospec(spec=LCPServer)
        lcp_server_mock.get_license = MagicMock(return_value=lcp_license)

        loan_identifier = "e99be177-4902-426a-9b96-0872ae877e2f"
        license_pool.loan_to(patron, external_identifier=loan_identifier)

        with self._configuration_factory.create(
            self._configuration_storage, self._db, LCPServerConfiguration
        ) as configuration:
            with patch("api.lcp.collection.LCPServer") as lcp_server_constructor:
                lcp_server_constructor.return_value = lcp_server_mock

                configuration.lcpserver_url = fixtures.LCPSERVER_URL
                configuration.lcpserver_user = fixtures.LCPSERVER_USER
                configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
                configuration.lcpserver_input_directory = (
                    fixtures.LCPSERVER_INPUT_DIRECTORY
                )
                configuration.provider_name = fixtures.PROVIDER_NAME
                configuration.passphrase_hint = fixtures.TEXT_HINT
                configuration.encryption_algorithm = (
                    LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
                )

                # Act
                loan = lcp_api.checkout(patron, "pin", license_pool, "internal format")

                # Assert
                assert loan.collection_id == self._lcp_collection.id
                assert loan.collection(self._db) == self._lcp_collection
                assert loan.license_pool(self._db) == license_pool
                assert loan.data_source_name == data_source_name
                assert loan.identifier_type == license_pool.identifier.type
                assert loan.external_identifier == loan_identifier
                assert loan.start_date == start_date
                assert loan.end_date == end_date

                lcp_server_mock.get_license.assert_called_once_with(
                    self._db, loan_identifier, patron
                )

    @freeze_time("2020-01-01 00:00:00")
    def test_fulfil(self):
        # Arrange
        lcp_api = LCPAPI(self._db, self._lcp_collection)
        patron = self._patron()
        days = self._lcp_collection.default_loan_period(patron.library)
        today = utc_now()
        expires = today + datetime.timedelta(days=days)

        data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
        data_source_name = data_source.name
        license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )

        lcp_license = json.loads(fixtures.LCPSERVER_LICENSE)
        lcp_server_mock = create_autospec(spec=LCPServer)
        lcp_server_mock.get_license = MagicMock(return_value=lcp_license)

        with self._configuration_factory.create(
            self._configuration_storage, self._db, LCPServerConfiguration
        ) as configuration:
            with patch("api.lcp.collection.LCPServer") as lcp_server_constructor:
                lcp_server_constructor.return_value = lcp_server_mock

                configuration.lcpserver_url = fixtures.LCPSERVER_URL
                configuration.lcpserver_user = fixtures.LCPSERVER_USER
                configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
                configuration.lcpserver_input_directory = (
                    fixtures.LCPSERVER_INPUT_DIRECTORY
                )
                configuration.provider_name = fixtures.PROVIDER_NAME
                configuration.passphrase_hint = fixtures.TEXT_HINT
                configuration.encryption_algorithm = (
                    LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
                )

                # Act
                license_pool.loan_to(
                    patron,
                    start=today,
                    end=expires,
                    external_identifier=lcp_license["id"],
                )
                fulfilment_info = lcp_api.fulfill(
                    patron, "pin", license_pool, "internal format"
                )

                # Assert
                assert isinstance(fulfilment_info, LCPFulfilmentInfo)
                assert fulfilment_info.collection_id == self._lcp_collection.id
                assert fulfilment_info.collection(self._db) == self._lcp_collection
                assert fulfilment_info.license_pool(self._db) == license_pool
                assert fulfilment_info.data_source_name == data_source_name
                assert fulfilment_info.identifier_type == license_pool.identifier.type

                lcp_server_mock.get_license.assert_called_once_with(
                    self._db, lcp_license["id"], patron
                )

    def test_patron_activity_returns_correct_result(self):
        # Arrange
        lcp_api = LCPAPI(self._db, self._lcp_collection)

        # 1. Correct loan
        patron = self._patron()
        days = self._lcp_collection.default_loan_period(patron.library)
        today = utc_now()
        expires = today + datetime.timedelta(days=days)
        data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
        data_source_name = data_source.name
        external_identifier = "1"
        license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )
        license_pool.loan_to(
            patron, start=today, end=expires, external_identifier=external_identifier
        )

        # 2. Loan from a different collection
        other_collection = self._collection(protocol=ExternalIntegration.MANUAL)
        other_external_identifier = "2"
        other_license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=other_collection,
        )
        other_license_pool.loan_to(
            patron,
            start=today,
            end=expires,
            external_identifier=other_external_identifier,
        )

        # 3. Other patron's loan
        other_patron = self._patron()
        other_license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=other_collection,
        )
        other_license_pool.loan_to(other_patron, start=today, end=expires)

        # 4. Expired loan
        other_license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )
        other_license_pool.loan_to(
            patron, start=today, end=today - datetime.timedelta(days=1)
        )

        # 5. Not-yet-started loan
        other_license_pool = self._licensepool(
            edition=None,
            data_source_name=data_source_name,
            collection=self._lcp_collection,
        )
        other_license_pool.loan_to(
            patron,
            start=today + datetime.timedelta(days=1),
            end=today + datetime.timedelta(days=2),
        )

        # Act
        loans = lcp_api.patron_activity(patron, "pin")

        # Assert
        assert len(loans) == 1

        loan = loans[0]
        assert loan.collection_id == self._lcp_collection.id
        assert loan.collection(self._db) == self._lcp_collection
        assert loan.license_pool(self._db) == license_pool
        assert loan.data_source_name == data_source_name
        assert loan.identifier_type == license_pool.identifier.type
        assert loan.external_identifier == external_identifier
        assert loan.start_date == today
        assert loan.end_date == expires
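# The checkout tests above pin the clock with `freeze_time` so that the loan's
# start and end dates computed inside the API match the dates computed in the
# test. A minimal standalone sketch of that pattern, using the standard
# datetime module rather than the project's `utc_now` helper (which is assumed
# to return an equivalent timezone-aware "now"):
import datetime

from freezegun import freeze_time


@freeze_time("2020-01-01 00:00:00")
def test_frozen_clock_makes_dates_deterministic():
    start = datetime.datetime.now(datetime.timezone.utc)
    end = start + datetime.timedelta(days=21)

    # Because the clock is frozen, "now" is always 2020-01-01 00:00:00 UTC,
    # so both values can be asserted exactly.
    assert start == datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc)
    assert end == datetime.datetime(2020, 1, 22, tzinfo=datetime.timezone.utc)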
class ProQuestOPDS2Importer(OPDS2Importer, BaseCirculationAPI, HasExternalIntegration):
    """Imports ProQuest OPDS 2.0 feeds into the Circulation Manager."""

    NAME = ExternalIntegration.PROQUEST
    DESCRIPTION = _("Import books from a ProQuest OPDS 2.0 feed.")
    SETTINGS = (
        ProQuestOPDS2ImporterConfiguration.to_settings()
        + ProQuestAPIClientConfiguration.to_settings()
    )
    LIBRARY_SETTINGS = BaseCirculationAPI.LIBRARY_SETTINGS + [
        BaseCirculationAPI.DEFAULT_LOAN_DURATION_SETTING
    ]

    def __init__(
        self,
        db,
        collection,
        parser=None,
        data_source_name=None,
        identifier_mapping=None,
        http_get=None,
        metadata_client=None,
        content_modifier=None,
        map_from_collection=None,
        mirrors=None,
    ):
        """Initialize a new instance of ProQuestOPDS2Importer class.

        :param db: Database session
        :type db: sqlalchemy.orm.session.Session

        :param collection: Circulation Manager's collection.
            LicensePools created by this OPDS2Import class will be associated with the given Collection.
            If this is None, no LicensePools will be created -- only Editions.
        :type collection: Collection

        :param parser: Feed parser
        :type parser: RWPMManifestParser

        :param data_source_name: Name of the source of this OPDS feed.
            All Editions created by this import will be associated with this DataSource.
            If there is no DataSource with this name, one will be created.
            NOTE: If `collection` is provided, its .data_source will take precedence over any value provided here.
            This is only for use when you are importing OPDS metadata without any particular Collection in mind.
        :type data_source_name: str

        :param identifier_mapping: Dictionary used for mapping external identifiers into a set of internal ones
        :type identifier_mapping: Dict

        :param metadata_client: A SimplifiedOPDSLookup object that is used to fill in missing metadata
        :type metadata_client: SimplifiedOPDSLookup

        :param content_modifier: A function that may modify-in-place representations
            (such as images and EPUB documents) as they come in from the network.
        :type content_modifier: Callable

        :param map_from_collection: Identifier mapping
        :type map_from_collection: Dict

        :param mirrors: A dictionary of different MirrorUploader objects for different purposes
        :type mirrors: Dict[MirrorUploader]
        """
        super(ProQuestOPDS2Importer, self).__init__(
            db,
            collection,
            parser if parser else RWPMManifestParser(OPDS2FeedParserFactory()),
            data_source_name,
            identifier_mapping,
            http_get,
            metadata_client,
            content_modifier,
            map_from_collection,
            mirrors,
        )

        self._logger = logging.getLogger(__name__)

        self._configuration_storage = ConfigurationStorage(self)
        self._configuration_factory = ConfigurationFactory()

        factory = ProQuestAPIClientFactory()
        self._api_client = factory.create(self)
        self._credential_manager = ProQuestCredentialManager()

    @contextmanager
    def _get_configuration(self, db):
        """Return the configuration object.

        :param db: Database session
        :type db: sqlalchemy.orm.session.Session

        :return: Configuration object
        :rtype: ProQuestOPDS2ImporterConfiguration
        """
        with self._configuration_factory.create(
            self._configuration_storage, db, ProQuestOPDS2ImporterConfiguration
        ) as configuration:
            yield configuration

    def _get_patron_affiliation_id(self, patron, configuration):
        """Get a patron's affiliation ID.

        :param patron: Patron object
        :type patron: core.model.patron.Patron

        :param configuration: Configuration object
        :type configuration: ProQuestOPDS2ImporterConfiguration

        :return: Patron's affiliation ID
        :rtype: Optional[str]
        """
        affiliation_id = self._credential_manager.lookup_patron_affiliation_id(
            self._db, patron, configuration.affiliation_attributes
        )

        self._logger.info(
            "Patron {0} has the following SAML affiliation ID: {1}".format(
                patron, affiliation_id
            )
        )

        if not affiliation_id:
            affiliation_id = configuration.test_affiliation_id

            if not affiliation_id:
                self._logger.error(
                    "Patron {0} has neither a real affiliation ID "
                    "nor a test affiliation ID set up as a configuration setting".format(
                        patron
                    )
                )
                raise MISSING_AFFILIATION_ID

            self._logger.info(
                "Since patron {0} doesn't have an affiliation ID, we set it to the test one: {1}".format(
                    patron, affiliation_id
                )
            )

        return affiliation_id

    def _create_proquest_token(self, patron, configuration):
        """Create a new ProQuest JWT bearer token.

        :param patron: Patron object
        :type patron: core.model.patron.Patron

        :param configuration: Configuration object
        :type configuration: ProQuestOPDS2ImporterConfiguration

        :return: ProQuest JWT bearer token
        :rtype: core.model.credential.Credential
        """
        affiliation_id = self._get_patron_affiliation_id(patron, configuration)

        try:
            token = self._api_client.create_token(self._db, affiliation_id)
            token_expiration_timeout = (
                int(configuration.token_expiration_timeout)
                if configuration.token_expiration_timeout
                else ProQuestOPDS2ImporterConfiguration.DEFAULT_TOKEN_EXPIRATION_TIMEOUT_SECONDS
            )
            token = self._credential_manager.save_proquest_token(
                self._db,
                patron,
                datetime.timedelta(seconds=token_expiration_timeout),
                token,
            )

            return token
        except Exception as exception:
            self._logger.exception("Cannot create a ProQuest JWT bearer token")

            raise CannotCreateProQuestTokenError(str(exception))

    def _get_or_create_proquest_token(self, patron, configuration):
        """Get an existing or create a new ProQuest JWT bearer token.

        :param patron: Patron object
        :type patron: core.model.patron.Patron

        :param configuration: Configuration object
        :type configuration: ProQuestOPDS2ImporterConfiguration

        :return: ProQuest JWT bearer token
        :rtype: core.model.credential.Credential
        """
        token = self._credential_manager.lookup_proquest_token(self._db, patron)

        self._logger.info(
            "Patron {0} has the following token: {1}".format(patron, token)
        )

        if not token:
            token = self._create_proquest_token(patron, configuration)

        return token

    def _get_book(self, patron, configuration, document_id):
        """Get a book's content (in the case of open-access books) or a book's link otherwise.

        :param patron: Patron object
        :type patron: core.model.patron.Patron

        :param configuration: Configuration object
        :type configuration: ProQuestOPDS2ImporterConfiguration

        :param document_id: ProQuest Doc ID
        :type document_id: str

        :return: Either an ACS link to the book or the book content
        :rtype: api.proquest.client.ProQuestBook
        """
        token = self._get_or_create_proquest_token(patron, configuration)
        iterations = 0

        while True:
            try:
                book = self._api_client.get_book(
                    self._db, token.credential, document_id
                )

                return book
            except HTTPError as exception:
                if exception.response.status_code != 401 or iterations >= 1:
                    raise
                else:
                    token = self._create_proquest_token(patron, configuration)
                    iterations += 1

    def _extract_image_links(self, publication, feed_self_url):
        """Extract a list of LinkData objects containing information about artwork.

        :param publication: Publication object
        :type publication: ast_core.Publication

        :param feed_self_url: Feed's self URL
        :type feed_self_url: str

        :return: List of links metadata
        :rtype: List[LinkData]
        """
        self._logger.debug(
            "Started extracting image links from {0}".format(
                encode(publication.images)
            )
        )

        image_links = []

        for image_link in publication.images.links:
            thumbnail_link = self._extract_link(
                image_link,
                feed_self_url,
                default_link_rel=Hyperlink.THUMBNAIL_IMAGE,
            )
            thumbnail_link.rel = Hyperlink.THUMBNAIL_IMAGE

            cover_link = self._extract_link(
                image_link,
                feed_self_url,
                default_link_rel=Hyperlink.IMAGE,
            )
            cover_link.rel = Hyperlink.IMAGE
            cover_link.thumbnail = thumbnail_link
            image_links.append(cover_link)

        self._logger.debug(
            "Finished extracting image links from {0}: {1}".format(
                encode(publication.images), encode(image_links)
            )
        )

        return image_links

    def _extract_media_types_and_drm_scheme_from_link(self, link):
        """Extract the content's media type and the DRM scheme used from the link.

        We consider the following two options viable:

        1. DRM-free books:
            {
                "rel": "http://opds-spec.org/acquisition",
                "href": "http://distributor.com/bookID",
                "type": "application/epub+zip"
            }

        2. DRM-protected books:
            {
                "rel": "http://opds-spec.org/acquisition",
                "href": "http://distributor.com/bookID",
                "type": "application/vnd.adobe.adept+xml",
                "properties": {
                    "indirectAcquisition": [
                        {
                            "type": "application/epub+zip"
                        }
                    ]
                }
            }

        :param link: Link object
        :type link: ast_core.Link

        :return: List of 2-tuples containing the content's media type and its DRM scheme
        :rtype: List[Tuple[str, str]]
        """
        self._logger.debug(
            "Started extracting media types and a DRM scheme from {0}".format(
                encode(link)
            )
        )

        media_types_and_drm_scheme = []

        if link.properties:
            if (
                not link.properties.availability
                or link.properties.availability.state
                == opds2_ast.OPDS2AvailabilityType.AVAILABLE.value
            ):
                drm_scheme = (
                    link.type
                    if link.type in DeliveryMechanism.KNOWN_DRM_TYPES
                    else DeliveryMechanism.NO_DRM
                )

                for acquisition_object in link.properties.indirect_acquisition:
                    media_types_and_drm_scheme.append(
                        (acquisition_object.type, drm_scheme)
                    )
        else:
            if (
                link.type in MediaTypes.BOOK_MEDIA_TYPES
                or link.type in MediaTypes.AUDIOBOOK_MEDIA_TYPES
            ):
                # Even though the book is DRM-free, we set its DRM type to DeliveryMechanism.BEARER_TOKEN.
                # We need it to allow the book to be downloaded by a client app.
                media_types_and_drm_scheme.append(
                    (link.type, DeliveryMechanism.BEARER_TOKEN)
                )

        self._logger.debug(
            "Finished extracting media types and a DRM scheme from {0}: {1}".format(
                encode(link), encode(media_types_and_drm_scheme)
            )
        )

        return media_types_and_drm_scheme

    def _is_identifier_allowed(self, identifier: Identifier) -> bool:
        """Check the identifier and return a boolean value indicating whether the CM can import it.

        NOTE: Currently, this method hard-codes the allowed identifier types.
        The next PR will add an additional configuration setting allowing this behaviour to be overridden
        and the allowed identifier types to be configured in the CM Admin UI.

        :param identifier: Identifier object
        :type identifier: Identifier

        :return: Boolean value indicating whether the CM can import the identifier
        :rtype: bool
        """
        return identifier.type == Identifier.PROQUEST_ID

    def _parse_identifier(self, identifier):
        """Parse the identifier and return an Identifier object representing it.

        :param identifier: String containing the identifier
        :type identifier: str

        :return: Identifier object
        :rtype: Identifier
        """
        return parse_identifier(self._db, identifier)

    def extract_next_links(self, feed):
        """Extract "next" links from the feed.

        :param feed: OPDS 2.0 feed
        :type feed: Union[str, opds2_ast.OPDS2Feed]

        :return: List of "next" links
        :rtype: List[str]
        """
        return []

    def patron_activity(self, patron, pin):
        """Return the patron's loans.

        TODO: This and the code from ODLAPI should be refactored into a generic set of rules
        for any situation where the CM, not the remote API, is responsible for managing loans and holds.

        :param patron: A Patron object for the patron who wants to check out the book
        :type patron: Patron

        :param pin: The patron's alleged password
        :type pin: string

        :return: List of patron's loans
        :rtype: List[LoanInfo]
        """
        now = utc_now()
        loans = (
            self._db.query(Loan)
            .join(LicensePool)
            .join(Collection)
            .filter(
                Collection.id == self._collection_id,
                Loan.patron == patron,
                or_(Loan.start == None, Loan.start <= now),
                or_(Loan.end == None, Loan.end > now),
            )
        ).all()

        loan_info_objects = []

        for loan in loans:
            licensepool = get_one(self._db, LicensePool, id=loan.license_pool_id)

            loan_info_objects.append(
                LoanInfo(
                    collection=self.collection,
                    data_source_name=licensepool.data_source.name,
                    identifier_type=licensepool.identifier.type,
                    identifier=licensepool.identifier.identifier,
                    start_date=loan.start,
                    end_date=loan.end,
                    fulfillment_info=None,
                    external_identifier=None,
                )
            )

        return loan_info_objects

    def place_hold(self, patron, pin, licensepool, notification_email_address):
        pass

    def release_hold(self, patron, pin, licensepool):
        pass

    def internal_format(self, delivery_mechanism):
        """Look up the internal format for this delivery mechanism or raise an exception.

        :param delivery_mechanism: A LicensePoolDeliveryMechanism
        :type delivery_mechanism: LicensePoolDeliveryMechanism
        """
        return delivery_mechanism

    def checkout(self, patron, pin, licensepool, internal_format):
        """Check out the book.

        NOTE: This method requires the patron to have either:
        - an active ProQuest JWT bearer token
        - or a SAML affiliation ID which will be used to create a new ProQuest JWT bearer token.
        """
        self._logger.info(
            "Started checking out '{0}' for patron {1}".format(internal_format, patron)
        )

        try:
            with self._get_configuration(self._db) as configuration:
                self._get_or_create_proquest_token(patron, configuration)

                loan_period = self.collection.default_loan_period(patron.library)
                start_time = utc_now()
                end_time = start_time + datetime.timedelta(days=loan_period)
                loan = LoanInfo(
                    licensepool.collection,
                    licensepool.data_source.name,
                    identifier_type=licensepool.identifier.type,
                    identifier=licensepool.identifier.identifier,
                    start_date=start_time,
                    end_date=end_time,
                    fulfillment_info=None,
                    external_identifier=None,
                )

                self._logger.info(
                    "Finished checking out {0} for patron {1}: {2}".format(
                        internal_format, patron, loan
                    )
                )

                return loan
        except BaseError as exception:
            self._logger.exception(
                "Failed to check out {0} for patron {1}".format(
                    internal_format, patron
                )
            )

            raise CannotLoan(str(exception))

    def fulfill(
        self,
        patron,
        pin,
        licensepool,
        internal_format=None,
        part=None,
        fulfill_part_url=None,
    ):
        """Fulfill the loan.

        NOTE: This method requires the patron to have either:
        - an active ProQuest JWT bearer token
        - or a SAML affiliation ID which will be used to create a new ProQuest JWT bearer token.
        """
        self._logger.info(
            "Started fulfilling '{0}' for patron {1}".format(internal_format, patron)
        )

        try:
            with self._get_configuration(self._db) as configuration:
                token = self._get_or_create_proquest_token(patron, configuration)
                book = self._get_book(
                    patron, configuration, licensepool.identifier.identifier
                )

                if book.content is not None:
                    fulfillment_info = FulfillmentInfo(
                        licensepool.collection,
                        licensepool.data_source.name,
                        licensepool.identifier.type,
                        licensepool.identifier.identifier,
                        content_link=None,
                        content_type=book.content_type
                        if book.content_type
                        else internal_format.delivery_mechanism.content_type,
                        content=book.content,
                        content_expires=None,
                    )
                else:
                    now = utc_now()
                    expires_in = (token.expires - now).total_seconds()
                    token_document = dict(
                        token_type="Bearer",
                        access_token=token.credential,
                        expires_in=expires_in,
                        location=book.link,
                    )

                    return FulfillmentInfo(
                        licensepool.collection,
                        licensepool.data_source.name,
                        licensepool.identifier.type,
                        licensepool.identifier.identifier,
                        content_link=None,
                        content_type=DeliveryMechanism.BEARER_TOKEN,
                        content=json.dumps(token_document),
                        content_expires=token.expires,
                    )

                self._logger.info(
                    "Finished fulfilling {0} for patron {1}: {2}".format(
                        internal_format, patron, fulfillment_info
                    )
                )

                return fulfillment_info
        except BaseError as exception:
            self._logger.exception(
                "Failed to fulfill {0} for patron {1}".format(internal_format, patron)
            )

            raise CannotFulfill(str(exception))

    def external_integration(self, db):
        """Return an external integration associated with this object.

        :param db: Database session
        :type db: sqlalchemy.orm.session.Session

        :return: External integration associated with this object
        :rtype: core.model.configuration.ExternalIntegration
        """
        return self.collection.external_integration
class ODL2Importer(OPDS2Importer, HasExternalIntegration):
    """Import information and formats from an ODL feed.

    The only change from OPDS2Importer is that this importer extracts
    FormatData and LicenseData from ODL 2.x's "licenses" arrays.
    """

    NAME = ODL2API.NAME

    def __init__(
        self,
        db,
        collection,
        parser=None,
        data_source_name=None,
        identifier_mapping=None,
        http_get=None,
        metadata_client=None,
        content_modifier=None,
        map_from_collection=None,
        mirrors=None,
    ):
        """Initialize a new instance of ODL2Importer class.

        :param db: Database session
        :type db: sqlalchemy.orm.session.Session

        :param collection: Circulation Manager's collection.
            LicensePools created by this OPDS2Import class will be associated with the given Collection.
            If this is None, no LicensePools will be created -- only Editions.
        :type collection: Collection

        :param parser: Feed parser
        :type parser: RWPMManifestParser

        :param data_source_name: Name of the source of this OPDS feed.
            All Editions created by this import will be associated with this DataSource.
            If there is no DataSource with this name, one will be created.
            NOTE: If `collection` is provided, its .data_source will take precedence over any value provided here.
            This is only for use when you are importing OPDS metadata without any particular Collection in mind.
        :type data_source_name: str

        :param identifier_mapping: Dictionary used for mapping external identifiers into a set of internal ones
        :type identifier_mapping: Dict

        :param metadata_client: A SimplifiedOPDSLookup object that is used to fill in missing metadata
        :type metadata_client: SimplifiedOPDSLookup

        :param content_modifier: A function that may modify-in-place representations
            (such as images and EPUB documents) as they come in from the network.
        :type content_modifier: Callable

        :param map_from_collection: Identifier mapping
        :type map_from_collection: Dict

        :param mirrors: A dictionary of different MirrorUploader objects for different purposes
        :type mirrors: Dict[MirrorUploader]
        """
        super(ODL2Importer, self).__init__(
            db,
            collection,
            parser if parser else RWPMManifestParser(ODLFeedParserFactory()),
            data_source_name,
            identifier_mapping,
            http_get,
            metadata_client,
            content_modifier,
            map_from_collection,
            mirrors,
        )

        self._logger = logging.getLogger(__name__)

        self._configuration_storage = ConfigurationStorage(self)
        self._configuration_factory = ConfigurationFactory()

    @contextmanager
    def _get_configuration(self, db):
        """Return the configuration object.

        :param db: Database session
        :type db: sqlalchemy.orm.session.Session

        :return: Configuration object
        :rtype: ODL2APIConfiguration
        """
        with self._configuration_factory.create(
            self._configuration_storage, db, ODL2APIConfiguration
        ) as configuration:
            yield configuration

    def _extract_publication_metadata(self, feed, publication, data_source_name):
        """Extract a Metadata object from webpub-manifest-parser's publication.

        :param feed: Feed object
        :type feed: opds2_ast.OPDS2Feed

        :param publication: Publication object
        :type publication: opds2_ast.OPDS2Publication

        :param data_source_name: Data source's name
        :type data_source_name: str

        :return: Publication's metadata
        :rtype: Metadata
        """
        metadata = super(ODL2Importer, self)._extract_publication_metadata(
            feed, publication, data_source_name
        )
        formats = []
        licenses = []
        medium = None

        with self._get_configuration(self._db) as configuration:
            skipped_license_formats = configuration.skipped_license_formats

            if skipped_license_formats:
                skipped_license_formats = set(skipped_license_formats)

        if publication.licenses:
            for odl_license in publication.licenses:
                identifier = odl_license.metadata.identifier

                checkout_link = first_or_default(
                    odl_license.links.get_by_rel(OPDS2LinkRelationsRegistry.BORROW.key)
                )
                if checkout_link:
                    checkout_link = checkout_link.href

                license_info_document_link = first_or_default(
                    odl_license.links.get_by_rel(OPDS2LinkRelationsRegistry.SELF.key)
                )
                if license_info_document_link:
                    license_info_document_link = license_info_document_link.href

                expires = (
                    to_utc(odl_license.metadata.terms.expires)
                    if odl_license.metadata.terms
                    else None
                )
                concurrency = (
                    int(odl_license.metadata.terms.concurrency)
                    if odl_license.metadata.terms
                    else None
                )

                if not license_info_document_link:
                    parsed_license = None
                else:
                    parsed_license = ODLImporter.get_license_data(
                        license_info_document_link,
                        checkout_link,
                        identifier,
                        expires,
                        concurrency,
                        self.http_get,
                    )

                if parsed_license is not None:
                    licenses.append(parsed_license)

                # The DPLA feed doesn't have information about the DRM protection used for audiobooks.
                # We try to extract that information from the License Info Document, if it's present there.
                license_formats = set(odl_license.metadata.formats)
                if parsed_license and parsed_license.content_types:
                    license_formats |= set(parsed_license.content_types)

                for license_format in license_formats:
                    if (
                        skipped_license_formats
                        and license_format in skipped_license_formats
                    ):
                        continue

                    if not medium:
                        medium = Edition.medium_from_media_type(license_format)

                    if license_format in ODLImporter.LICENSE_FORMATS:
                        # Special case to handle DeMarque audiobooks which
                        # include the protection in the content type
                        drm_schemes = [
                            ODLImporter.LICENSE_FORMATS[license_format][
                                ODLImporter.DRM_SCHEME
                            ]
                        ]
                        license_format = ODLImporter.LICENSE_FORMATS[license_format][
                            ODLImporter.CONTENT_TYPE
                        ]
                    else:
                        drm_schemes = (
                            odl_license.metadata.protection.formats
                            if odl_license.metadata.protection
                            else []
                        )

                    for drm_scheme in drm_schemes or [None]:
                        formats.append(
                            FormatData(
                                content_type=license_format,
                                drm_scheme=drm_scheme,
                                rights_uri=RightsStatus.IN_COPYRIGHT,
                            )
                        )

        metadata.circulation.licenses = licenses
        metadata.circulation.licenses_owned = None
        metadata.circulation.licenses_available = None
        metadata.circulation.licenses_reserved = None
        metadata.circulation.patrons_in_hold_queue = None
        metadata.circulation.formats.extend(formats)
        metadata.medium = medium

        return metadata

    def external_integration(self, db):
        return self.collection.external_integration
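# A simplified, standalone sketch of the format-extraction rule implemented
# above, using plain values instead of webpub-manifest-parser objects. The
# license formats, protection formats, and the "skipped formats" setting come
# from the method above; the helper itself and the example media types are
# illustrative only (the DeMarque special case is deliberately omitted).
from typing import List, Optional, Set, Tuple


def extract_formats(
    license_formats: Set[str],
    protection_formats: List[str],
    skipped_license_formats: Set[str],
) -> List[Tuple[str, Optional[str]]]:
    """Return (content type, DRM scheme) pairs for one ODL license."""
    formats = []

    for license_format in license_formats:
        # Formats listed in the "skipped" configuration setting are ignored.
        if license_format in skipped_license_formats:
            continue

        # Every remaining content type is combined with each declared DRM
        # scheme, or with no DRM at all if the license declares none.
        for drm_scheme in protection_formats or [None]:
            formats.append((license_format, drm_scheme))

    return formats


# For example, an EPUB license protected by LCP, with "text/html" skipped:
assert extract_formats(
    {"application/epub+zip", "text/html"},
    ["application/vnd.readium.lcp.license.v1.0+json"],
    {"text/html"},
) == [("application/epub+zip", "application/vnd.readium.lcp.license.v1.0+json")]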
class TestLCPServer(DatabaseTest):
    def setup_method(self):
        super(TestLCPServer, self).setup_method()

        self._lcp_collection = self._collection(protocol=ExternalIntegration.LCP)
        self._integration = self._lcp_collection.external_integration
        integration_owner = create_autospec(spec=HasExternalIntegration)
        integration_owner.external_integration = MagicMock(
            return_value=self._integration
        )
        self._configuration_storage = ConfigurationStorage(integration_owner)
        self._configuration_factory = ConfigurationFactory()
        self._hasher_factory = HasherFactory()
        self._credential_factory = LCPCredentialFactory()
        self._lcp_server = LCPServer(
            self._configuration_storage,
            self._configuration_factory,
            self._hasher_factory,
            self._credential_factory,
        )

    @parameterized.expand([
        ("empty_input_directory", ""),
        ("non_empty_input_directory", "/tmp/encrypted_books"),
    ])
    def test_add_content(self, _, input_directory):
        # Arrange
        lcp_server = LCPServer(
            self._configuration_storage,
            self._configuration_factory,
            self._hasher_factory,
            self._credential_factory,
        )
        encrypted_content = LCPEncryptionResult(
            content_id=fixtures.CONTENT_ID,
            content_encryption_key="12345",
            protected_content_location="/opt/readium/files/encrypted",
            protected_content_disposition="encrypted_book",
            protected_content_type="application/epub+zip",
            protected_content_length=12345,
            protected_content_sha256="12345",
        )
        expected_protected_content_disposition = os.path.join(
            input_directory, encrypted_content.protected_content_disposition
        )

        with self._configuration_factory.create(
            self._configuration_storage, self._db, LCPServerConfiguration
        ) as configuration:
            configuration.lcpserver_url = fixtures.LCPSERVER_URL
            configuration.lcpserver_user = fixtures.LCPSERVER_USER
            configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
            configuration.lcpserver_input_directory = input_directory
            configuration.provider_name = fixtures.PROVIDER_NAME
            configuration.passphrase_hint = fixtures.TEXT_HINT
            configuration.encryption_algorithm = (
                LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
            )

            with requests_mock.Mocker() as request_mock:
                url = urllib.parse.urljoin(
                    fixtures.LCPSERVER_URL,
                    "/contents/{0}".format(fixtures.CONTENT_ID),
                )
                request_mock.put(url)

                # Act
                lcp_server.add_content(self._db, encrypted_content)

                # Assert
                assert request_mock.called == True

                json_request = json.loads(request_mock.last_request.text)
                assert json_request["content-id"] == encrypted_content.content_id
                assert (
                    json_request["content-encryption-key"]
                    == encrypted_content.content_encryption_key
                )
                assert (
                    json_request["protected-content-location"]
                    == expected_protected_content_disposition
                )
                assert (
                    json_request["protected-content-disposition"]
                    == encrypted_content.protected_content_disposition
                )
                assert (
                    json_request["protected-content-type"]
                    == encrypted_content.protected_content_type
                )
                assert (
                    json_request["protected-content-length"]
                    == encrypted_content.protected_content_length
                )
                assert (
                    json_request["protected-content-sha256"]
                    == encrypted_content.protected_content_sha256
                )

    @parameterized.expand([
        ("none_rights", None, None, None, None),
        ("license_start", datetime.datetime(2020, 1, 1, 0, 0, 0), None, None, None),
        ("license_end", None, datetime.datetime(2020, 12, 31, 23, 59, 59), None, None),
        ("max_printable_pages", None, None, 10, None),
        ("max_printable_pages_empty_max_copiable_pages", None, None, 10, ""),
        ("empty_max_printable_pages", None, None, "", None),
        ("max_copiable_pages", None, None, None, 1024),
        ("empty_max_printable_pages_max_copiable_pages", None, None, "", 1024),
        ("empty_max_copiable_pages", None, None, None, ""),
        (
            "dates",
            datetime.datetime(2020, 1, 1, 0, 0, 0),
            datetime.datetime(2020, 12, 31, 23, 59, 59),
            None,
            None,
        ),
        (
            "full_rights",
            datetime.datetime(2020, 1, 1, 0, 0, 0),
            datetime.datetime(2020, 12, 31, 23, 59, 59),
            10,
            1024,
        ),
    ])
    def test_generate_license(
        self, _, license_start, license_end, max_printable_pages, max_copiable_pages
    ):
        # Arrange
        patron = self._patron()
        expected_patron_id = "52a190d1-cd69-4794-9d7a-1ec50392697f"
        expected_patron_passphrase = "52a190d1-cd69-4794-9d7a-1ec50392697a"
        expected_patron_key = self._hasher_factory.create(
            LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
        ).hash(expected_patron_passphrase)

        with self._configuration_factory.create(
            self._configuration_storage, self._db, LCPServerConfiguration
        ) as configuration:
            configuration.lcpserver_url = fixtures.LCPSERVER_URL
            configuration.lcpserver_user = fixtures.LCPSERVER_USER
            configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
            configuration.provider_name = fixtures.PROVIDER_NAME
            configuration.passphrase_hint = fixtures.TEXT_HINT
            configuration.encryption_algorithm = (
                LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
            )
            configuration.max_printable_pages = max_printable_pages
            configuration.max_copiable_pages = max_copiable_pages

            self._credential_factory.get_patron_id = MagicMock(
                return_value=expected_patron_id
            )
            self._credential_factory.get_patron_passphrase = MagicMock(
                return_value=expected_patron_passphrase
            )

            with requests_mock.Mocker() as request_mock:
                url = urllib.parse.urljoin(
                    fixtures.LCPSERVER_URL,
                    "/contents/{0}/license".format(fixtures.CONTENT_ID),
                )
                request_mock.post(url, json=fixtures.LCPSERVER_LICENSE)

                # Act
                license = self._lcp_server.generate_license(
                    self._db, fixtures.CONTENT_ID, patron, license_start, license_end
                )

                # Assert
                assert request_mock.called == True
                assert license == fixtures.LCPSERVER_LICENSE

                json_request = json.loads(request_mock.last_request.text)
                assert json_request["provider"] == fixtures.PROVIDER_NAME
                assert json_request["user"]["id"] == expected_patron_id
                assert (
                    json_request["encryption"]["user_key"]["text_hint"]
                    == fixtures.TEXT_HINT
                )
                assert (
                    json_request["encryption"]["user_key"]["hex_value"]
                    == expected_patron_key
                )

                if license_start is not None:
                    assert json_request["rights"]["start"] == utils.format_datetime(
                        license_start
                    )
                if license_end is not None:
                    assert json_request["rights"]["end"] == utils.format_datetime(
                        license_end
                    )
                if max_printable_pages is not None and max_printable_pages != "":
                    assert json_request["rights"]["print"] == max_printable_pages
                if max_copiable_pages is not None and max_copiable_pages != "":
                    assert json_request["rights"]["copy"] == max_copiable_pages

                all_rights_fields_are_empty = all(
                    rights_field is None or rights_field == ""
                    for rights_field in [
                        license_start,
                        license_end,
                        max_printable_pages,
                        max_copiable_pages,
                    ]
                )
                if all_rights_fields_are_empty:
                    assert ("rights" in json_request) == False

                self._credential_factory.get_patron_id.assert_called_once_with(
                    self._db, patron
                )
                self._credential_factory.get_patron_passphrase.assert_called_once_with(
                    self._db, patron
                )
def test_import(self, importer, mock_get, datasource, db): """Ensure that ODL2Importer2 correctly processes and imports the ODL feed encoded using OPDS 2.x. NOTE: `freeze_time` decorator is required to treat the licenses in the ODL feed as non-expired. """ # Arrange moby_dick_license = LicenseInfoHelper( license=LicenseHelper( identifier="urn:uuid:f7847120-fc6f-11e3-8158-56847afe9799", concurrency=10, checkouts=30, expires="2016-04-25T12:25:21+02:00", ), left=30, available=10, ) mock_get.add(moby_dick_license) feed = self.get_data("feed.json") configuration_storage = ConfigurationStorage(importer) configuration_factory = ConfigurationFactory() with configuration_factory.create( configuration_storage, db, ODL2APIConfiguration ) as configuration: configuration.skipped_license_formats = json.dumps(["text/html"]) # Act imported_editions, pools, works, failures = importer.import_from_feed(feed) # Assert # 1. Make sure that there is a single edition only assert isinstance(imported_editions, list) assert 1 == len(imported_editions) [moby_dick_edition] = imported_editions assert isinstance(moby_dick_edition, Edition) assert moby_dick_edition.primary_identifier.identifier == "978-3-16-148410-0" assert moby_dick_edition.primary_identifier.type == "ISBN" assert u"Moby-Dick" == moby_dick_edition.title assert u"eng" == moby_dick_edition.language assert u"eng" == moby_dick_edition.language assert EditionConstants.BOOK_MEDIUM == moby_dick_edition.medium assert u"Herman Melville" == moby_dick_edition.author assert 1 == len(moby_dick_edition.author_contributors) [moby_dick_author] = moby_dick_edition.author_contributors assert isinstance(moby_dick_author, Contributor) assert u"Herman Melville" == moby_dick_author.display_name assert u"Melville, Herman" == moby_dick_author.sort_name assert 1 == len(moby_dick_author.contributions) [moby_dick_author_author_contribution] = moby_dick_author.contributions assert isinstance(moby_dick_author_author_contribution, Contribution) assert moby_dick_author == moby_dick_author_author_contribution.contributor assert moby_dick_edition == moby_dick_author_author_contribution.edition assert Contributor.AUTHOR_ROLE == moby_dick_author_author_contribution.role assert datasource == moby_dick_edition.data_source assert u"Test Publisher" == moby_dick_edition.publisher assert datetime.date(2015, 9, 29) == moby_dick_edition.published assert u"http://example.org/cover.jpg" == moby_dick_edition.cover_full_url assert ( u"http://example.org/cover-small.jpg" == moby_dick_edition.cover_thumbnail_url ) # 2. 
Make sure that license pools have correct configuration assert isinstance(pools, list) assert 1 == len(pools) [moby_dick_license_pool] = pools assert isinstance(moby_dick_license_pool, LicensePool) assert moby_dick_license_pool.identifier.identifier == "978-3-16-148410-0" assert moby_dick_license_pool.identifier.type == "ISBN" assert not moby_dick_license_pool.open_access assert 30 == moby_dick_license_pool.licenses_owned assert 10 == moby_dick_license_pool.licenses_available assert 2 == len(moby_dick_license_pool.delivery_mechanisms) moby_dick_epub_adobe_drm_delivery_mechanism = ( self._get_delivery_mechanism_by_drm_scheme_and_content_type( moby_dick_license_pool.delivery_mechanisms, MediaTypes.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM, ) ) assert moby_dick_epub_adobe_drm_delivery_mechanism is not None moby_dick_epub_lcp_drm_delivery_mechanism = ( self._get_delivery_mechanism_by_drm_scheme_and_content_type( moby_dick_license_pool.delivery_mechanisms, MediaTypes.EPUB_MEDIA_TYPE, DeliveryMechanism.LCP_DRM, ) ) assert moby_dick_epub_lcp_drm_delivery_mechanism is not None assert 1 == len(moby_dick_license_pool.licenses) [moby_dick_license] = moby_dick_license_pool.licenses assert ( "urn:uuid:f7847120-fc6f-11e3-8158-56847afe9799" == moby_dick_license.identifier ) assert ( "http://www.example.com/get{?id,checkout_id,expires,patron_id,passphrase,hint,hint_url,notification_url}" == moby_dick_license.checkout_url ) assert "http://www.example.com/status/294024" == moby_dick_license.status_url assert ( datetime.datetime(2016, 4, 25, 10, 25, 21, tzinfo=datetime.timezone.utc) == moby_dick_license.expires ) assert 30 == moby_dick_license.checkouts_left assert 10 == moby_dick_license.checkouts_available # 3. Make sure that work objects contain all the required metadata assert isinstance(works, list) assert 1 == len(works) [moby_dick_work] = works assert isinstance(moby_dick_work, Work) assert moby_dick_edition == moby_dick_work.presentation_edition assert 1 == len(moby_dick_work.license_pools) assert moby_dick_license_pool == moby_dick_work.license_pools[0] # 4. Make sure that the failure is covered assert 1 == len(failures) huck_finn_failures = failures["9781234567897"] assert 1 == len(huck_finn_failures) [huck_finn_failure] = huck_finn_failures assert isinstance(huck_finn_failure, CoverageFailure) assert "9781234567897" == huck_finn_failure.obj.identifier huck_finn_semantic_error = ( ODL_PUBLICATION_MUST_CONTAIN_EITHER_LICENSES_OR_OA_ACQUISITION_LINK_ERROR( node=ODLPublication( metadata=PresentationMetadata(identifier="urn:isbn:9781234567897") ), node_property=None, ) ) assert str(huck_finn_semantic_error) == huck_finn_failure.exception
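# Hedged sketch of the _get_delivery_mechanism_by_drm_scheme_and_content_type helper
# relied on above; the real implementation lives on the shared test base class. It is
# assumed to scan a license pool's LicensePoolDeliveryMechanism entries and return the
# mechanism matching the given content type and DRM scheme, or None when there is no
# match, which is why the tests assert that the result "is not None".
def _get_delivery_mechanism_by_drm_scheme_and_content_type(
    delivery_mechanisms, content_type, drm_scheme
):
    for license_pool_delivery_mechanism in delivery_mechanisms:
        mechanism = license_pool_delivery_mechanism.delivery_mechanism
        if mechanism.content_type == content_type and mechanism.drm_scheme == drm_scheme:
            return mechanism
    return None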
class TestProQuestAPIClient(DatabaseTest): def setup_method(self, mock_search=True): super(TestProQuestAPIClient, self).setup_method() self._proquest_collection = self._collection( protocol=ExternalIntegration.PROQUEST) self._integration = self._proquest_collection.external_integration integration_owner = create_autospec(spec=HasExternalIntegration) integration_owner.external_integration = MagicMock( return_value=self._integration) self._configuration_storage = ConfigurationStorage(integration_owner) self._configuration_factory = ConfigurationFactory() self._client = ProQuestAPIClient(self._configuration_storage, self._configuration_factory) @parameterized.expand([ ("in_the_case_of_http_error_status_code", { "status_code": 401 }, HTTPError), ( "in_the_case_of_non_json_response", { "text": "garbage" }, ProQuestAPIInvalidJSONResponseError, ), ( "when_json_document_does_not_contain_status_code", { "json": { "feed": "" } }, ProQuestAPIMissingJSONPropertyError, ), ( "json_document_contains_error_status_code", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 401 } }, HTTPError, ), ( "json_document_does_not_contain_opds_feed", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200 } }, ProQuestAPIMissingJSONPropertyError, ), ]) def test_download_feed_page_correctly_fails(self, _, response_arguments, expected_exception_class): # Arrange page = 1 hits_per_page = 10 books_catalog_service_url = URLUtility.build_url( BOOKS_CATALOG_SERVICE_URL, { "page": page, "hitsPerPage": hits_per_page }) with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.books_catalog_service_url = BOOKS_CATALOG_SERVICE_URL with requests_mock.Mocker() as request_mock: request_mock.get(books_catalog_service_url, **response_arguments) # Act with pytest.raises(expected_exception_class): self._client.download_feed_page(self._db, page, hits_per_page) def test_download_feed_page_successfully_extracts_feed_from_correct_response( self): # Arrange page = 1 hits_per_page = 10 books_catalog_service_url = URLUtility.build_url( BOOKS_CATALOG_SERVICE_URL, { "page": page, "hitsPerPage": hits_per_page }) expected_feed = json.dumps({}) response = { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, ProQuestAPIClient.RESPONSE_OPDS_FEED_FIELD: expected_feed, } with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.books_catalog_service_url = BOOKS_CATALOG_SERVICE_URL with requests_mock.Mocker() as request_mock: request_mock.get(books_catalog_service_url, json=response) # Act feed = self._client.download_feed_page(self._db, page, hits_per_page) # Assert assert expected_feed == feed @parameterized.expand([ ( "in_the_case_of_http_error_status_code", { "status_code": 401 }, ), ( "in_the_case_of_non_json_response", { "text": "garbage" }, ), ( "when_json_document_does_not_contain_status_code", { "json": { "feed": "" } }, ), ( "json_document_contains_error_status_code", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 401 } }, ), ( "json_document_does_not_contain_opds_feed", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200 } }, ), ( "json_document_contains_incorrect_opds_feed", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, "opdsFeed": { "publications": [], "group": { "publications": [] } }, } }, ), ( "json_document_contains_empty_opds_feed", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, "opdsFeed": { 
"publications": [], "groups": { "publications": [] }, }, } }, ), ]) def test_download_all_feed_pages_correctly_stops( self, _, last_page_response_arguments): # Arrange page_size = ProQuestAPIClient.MAX_PAGE_SIZE books_catalog_service_url_1 = URLUtility.build_url( BOOKS_CATALOG_SERVICE_URL, { "page": 1, "hitsPerPage": page_size }) books_catalog_service_url_2 = URLUtility.build_url( BOOKS_CATALOG_SERVICE_URL, { "page": 2, "hitsPerPage": page_size }) books_catalog_service_url_3 = URLUtility.build_url( BOOKS_CATALOG_SERVICE_URL, { "page": 3, "hitsPerPage": page_size }) expected_feed_1 = { "metadata": { "title": "Page 1" }, "groups": [{ "publications": [{ "metadata": { "title": "Publication 1" } }] }], } expected_feed_2 = { "metadata": { "title": "Page 2" }, "groups": [{ "publications": [{ "metadata": { "title": "Publication 2" } }] }], } expected_response_1 = { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, ProQuestAPIClient.RESPONSE_OPDS_FEED_FIELD: expected_feed_1, } expected_response_2 = { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, ProQuestAPIClient.RESPONSE_OPDS_FEED_FIELD: expected_feed_2, } with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.books_catalog_service_url = BOOKS_CATALOG_SERVICE_URL configuration.page_size = page_size with requests_mock.Mocker() as request_mock: request_mock.get(books_catalog_service_url_1, json=expected_response_1) request_mock.get(books_catalog_service_url_2, json=expected_response_2) request_mock.get(books_catalog_service_url_3, **last_page_response_arguments) # Act feeds = self._client.download_all_feed_pages(self._db) # Assert assert [expected_feed_1, expected_feed_2] == list(feeds) @parameterized.expand([ ("in_the_case_of_http_error_status_code", { "status_code": 401 }, HTTPError), ( "in_the_case_of_non_json_response", { "text": "garbage" }, ProQuestAPIInvalidJSONResponseError, ), ( "when_json_document_does_not_contain_status_code", { "json": { "dummy": "" } }, ProQuestAPIMissingJSONPropertyError, ), ( "json_document_contains_error_status_code", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 401 } }, HTTPError, ), ( "json_document_does_not_contain_token", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200 } }, ProQuestAPIMissingJSONPropertyError, ), ]) def test_create_token_correctly_fails(self, _, response_arguments, expected_exception_class): # Arrange affiliation_id = "1" partner_auth_token_service_url = URLUtility.build_url( PARTNER_AUTH_TOKEN_SERVICE_URL, {"userName": affiliation_id}) with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.partner_auth_token_service_url = ( PARTNER_AUTH_TOKEN_SERVICE_URL) with requests_mock.Mocker() as request_mock: request_mock.get(partner_auth_token_service_url, **response_arguments) # Act with pytest.raises(expected_exception_class): self._client.create_token(self._db, affiliation_id) def test_create_token_correctly_extracts_token(self): # Arrange affiliation_id = "1" partner_auth_token_service_url = URLUtility.build_url( PARTNER_AUTH_TOKEN_SERVICE_URL, {"userName": affiliation_id}) expected_token = "12345" response = { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, ProQuestAPIClient.TOKEN_FIELD: expected_token, } with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.partner_auth_token_service_url = ( 
PARTNER_AUTH_TOKEN_SERVICE_URL) with requests_mock.Mocker() as request_mock: request_mock.get(partner_auth_token_service_url, json=response) # Act token = self._client.create_token(self._db, affiliation_id) # Assert assert expected_token == token @parameterized.expand([ ("in_the_case_of_http_error_status_code", { "status_code": 401 }, HTTPError), ( "when_json_document_does_not_contain_status_code", { "json": { "dummy": "" } }, ProQuestAPIMissingJSONPropertyError, ), ( "json_document_contains_error_status_code", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 401 } }, HTTPError, ), ( "json_document_does_not_contain_download_link", { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200 } }, ProQuestAPIMissingJSONPropertyError, ), ]) def test_get_book_correctly_fails(self, _, response_arguments, expected_exception_class): # Arrange token = "12345" document_id = "12345" download_link_service_url = URLUtility.build_url( DOWNLOAD_LINK_SERVICE_URL, {"docID": document_id}) with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.download_link_service_url = DOWNLOAD_LINK_SERVICE_URL with requests_mock.Mocker() as request_mock: request_mock.get(download_link_service_url, **response_arguments) # Act with pytest.raises(expected_exception_class): self._client.get_book(self._db, token, document_id) def test_get_book_correctly_extracts_open_access_books(self): # Arrange book_content = b"PDF Book12345" response_arguments = {"content": book_content} expected_open_access_book = ProQuestBook(content=book_content) token = "12345" document_id = "12345" download_link_service_url = URLUtility.build_url( DOWNLOAD_LINK_SERVICE_URL, {"docID": document_id}) with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.download_link_service_url = DOWNLOAD_LINK_SERVICE_URL with requests_mock.Mocker() as request_mock: request_mock.get(download_link_service_url, **response_arguments) # Act book = self._client.get_book(self._db, token, document_id) # Assert assert expected_open_access_book == book assert type(expected_open_access_book) == type(book) def test_get_book_correctly_extracts_acsm_books(self): # Arrange acsm_file_content = """<fulfillmentToken fulfillmentType="loan" auth="user" xmlns="http://ns.adobe.com/adept"> <distributor>urn:uuid:9cb786e8-586a-4950-8901-fff8d2ee6025</distributor> </fulfillmentToken> """ download_link = "https://proquest.com/fulfill?documentID=12345" expected_acsm_book = ProQuestBook( content=acsm_file_content.encode("utf-8"), content_type=DeliveryMechanism.ADOBE_DRM, ) first_response_arguments = { "json": { ProQuestAPIClient.RESPONSE_STATUS_CODE_FIELD: 200, ProQuestAPIClient.DOWNLOAD_LINK_FIELD: download_link, } } second_response_arguments = { "content": acsm_file_content.encode("utf-8") } token = "12345" document_id = "12345" download_link_service_url = URLUtility.build_url( DOWNLOAD_LINK_SERVICE_URL, {"docID": document_id}) with self._configuration_factory.create( self._configuration_storage, self._db, ProQuestAPIClientConfiguration) as configuration: configuration.download_link_service_url = DOWNLOAD_LINK_SERVICE_URL with requests_mock.Mocker() as request_mock: request_mock.get(download_link_service_url, **first_response_arguments) request_mock.get(download_link, **second_response_arguments) # Act book = self._client.get_book(self._db, token, document_id) # Assert assert expected_acsm_book == book assert 
type(expected_acsm_book) == type(book)
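# Hedged sketch of the URLUtility.build_url helper used throughout this class. It is
# assumed to append the given parameters to the base URL as a query string, which is
# how the requests_mock matchers above line up with the client's outgoing requests.
import urllib.parse

def build_url(base_url, parameters):
    """Return base_url with parameters encoded as its query string (assumed behavior)."""
    return "{0}?{1}".format(base_url, urllib.parse.urlencode(parameters))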