def test_add_for(self):
    """WorkCoverageRecord.add_for creates, updates, and looks up
    per-operation coverage records for a Work.
    """
    work = self._work()
    operation = "foo"
    record, is_new = WorkCoverageRecord.add_for(work, operation)
    assert True == is_new

    # If we call add_for again we get the same record back, but we
    # can modify the timestamp.
    a_week_ago = utc_now() - datetime.timedelta(days=7)
    record2, is_new = WorkCoverageRecord.add_for(work, operation, a_week_ago)
    assert record == record2
    assert False == is_new
    assert a_week_ago == record2.timestamp

    # If we don't specify an operation we get a totally different
    # record.
    record3, ignore = WorkCoverageRecord.add_for(work, None)
    assert record3 != record
    assert None == record3.operation
    # Use total_seconds() rather than .seconds: .seconds ignores the
    # .days component of a timedelta, so a timestamp off by more than
    # a day (or slightly in the future) could still slip through.
    seconds = (utc_now() - record3.timestamp).total_seconds()
    assert seconds < 10

    # If we call lookup we get the same record.
    record4 = WorkCoverageRecord.lookup(work, None)
    assert record3 == record4

    # We can change the status.
    record5, is_new = WorkCoverageRecord.add_for(
        work, operation, status=WorkCoverageRecord.PERSISTENT_FAILURE
    )
    assert record5 == record
    assert WorkCoverageRecord.PERSISTENT_FAILURE == record.status
def test_patron_annotations_are_descending(self):
    """Patron.annotations returns annotations newest-first by timestamp."""
    pool1 = self._licensepool(None)
    pool2 = self._licensepool(None)
    patron = self._patron()
    # NOTE(review): both annotations are attached to pool2's identifier
    # and pool1 is never used -- presumably fine since only ordering is
    # under test, but worth confirming it wasn't meant to be pool1.
    annotation1, ignore = create(
        self._db,
        Annotation,
        patron=patron,
        identifier=pool2.identifier,
        motivation=Annotation.IDLING,
        content="The content",
        active=True,
    )
    annotation2, ignore = create(
        self._db,
        Annotation,
        patron=patron,
        identifier=pool2.identifier,
        motivation=Annotation.IDLING,
        content="The content",
        active=True,
    )

    # Give the two annotations distinct timestamps a day apart.
    yesterday = utc_now() - datetime.timedelta(days=1)
    today = utc_now()
    annotation1.timestamp = yesterday
    annotation2.timestamp = today

    assert 2 == len(patron.annotations)
    # The newer annotation comes first.
    assert annotation2 == patron.annotations[0]
    assert annotation1 == patron.annotations[1]
def test_add_for(self):
    """CoverageRecord.add_for creates, updates, and looks up
    per-source, per-operation coverage records for an Edition.
    """
    source = DataSource.lookup(self._db, DataSource.OCLC)
    edition = self._edition()
    operation = "foo"
    record, is_new = CoverageRecord.add_for(edition, source, operation)
    assert True == is_new

    # If we call add_for again we get the same record back, but we
    # can modify the timestamp.
    a_week_ago = utc_now() - datetime.timedelta(days=7)
    record2, is_new = CoverageRecord.add_for(edition, source, operation, a_week_ago)
    assert record == record2
    assert False == is_new
    assert a_week_ago == record2.timestamp

    # If we don't specify an operation we get a totally different
    # record.
    record3, ignore = CoverageRecord.add_for(edition, source)
    assert record3 != record
    assert None == record3.operation
    # Use total_seconds() rather than .seconds: .seconds ignores the
    # .days component of a timedelta, so a timestamp off by more than
    # a day (or slightly in the future) could still slip through.
    seconds = (utc_now() - record3.timestamp).total_seconds()
    assert seconds < 10

    # If we call lookup we get the same record.
    record4 = CoverageRecord.lookup(edition.primary_identifier, source)
    assert record3 == record4

    # We can change the status.
    record5, is_new = CoverageRecord.add_for(
        edition, source, operation, status=CoverageRecord.PERSISTENT_FAILURE
    )
    assert record5 == record
    assert CoverageRecord.PERSISTENT_FAILURE == record.status
def __init__(self, name_id, attribute_statement, valid_till=None):
    """Initializes a new instance of Subject class

    :param name_id: Name ID
    :type name_id: SAMLNameID

    :param attribute_statement: Attribute statement
    :type attribute_statement: SAMLAttributeStatement

    :param valid_till: Time till which the subject is valid.
        May be a datetime.timedelta (used as-is), a datetime.datetime
        (converted to the timedelta remaining from now), or an int
        UNIX timestamp (likewise converted).
        The default value is 30 minutes.
        Please refer to the Shibboleth IdP documentation for more details:
        - https://wiki.shibboleth.net/confluence/display/IDP30/SessionConfiguration
    :type valid_till: Optional[Union[int, datetime.datetime, datetime.timedelta]]

    :raise ValueError: If valid_till is of an unsupported type
    """
    self._name_id = name_id
    self._attribute_statement = attribute_statement

    # Normalize valid_till to a timedelta, assigning exactly once.
    # (The original assigned the raw value first and then overwrote
    # it in every branch, which was redundant.)
    if valid_till is None:
        self._valid_till = datetime.timedelta(minutes=30)
    elif isinstance(valid_till, datetime.datetime):
        self._valid_till = valid_till - utc_now()
    elif isinstance(valid_till, int):
        self._valid_till = from_timestamp(valid_till) - utc_now()
    elif isinstance(valid_till, datetime.timedelta):
        self._valid_till = valid_till
    else:
        raise ValueError("valid_till is not valid")
def test_uniqueness_constraints_with_library(self):
    """When a library is provided, license_pool + library + type + start
    must be unique across CirculationEvents.
    """
    pool = self._licensepool(edition=None)
    shared_fields = dict(
        license_pool=pool,
        library=self._default_library,
        type=CirculationEvent.DISTRIBUTOR_TITLE_ADD,
    )
    first_start = utc_now()
    event = create(self._db, CirculationEvent, start=first_start, **shared_fields)

    # A different timestamp is no problem.
    second_start = utc_now()
    event2 = create(self._db, CirculationEvent, start=second_start, **shared_fields)
    assert event != event2

    # Reusing the first timestamp violates the constraint, raising an
    # IntegrityError that ruins the entire transaction.
    pytest.raises(
        IntegrityError,
        create,
        self._db,
        CirculationEvent,
        start=first_start,
        **shared_fields
    )
    self._db.rollback()
def test_stamp(self):
    """Exercise Timestamp.stamp(): initial creation, in-place update,
    collection scoping, and clearing fields with CLEAR_VALUE.
    """
    service = "service"
    # Shadows the builtin `type`; left as-is here.
    type = Timestamp.SCRIPT_TYPE

    # If no date is specified, the value of the timestamp is the time
    # stamp() was called.
    stamp = Timestamp.stamp(self._db, service, type)
    now = utc_now()
    assert (now - stamp.finish).total_seconds() < 2
    assert stamp.start == stamp.finish
    assert service == stamp.service
    assert type == stamp.service_type
    assert None == stamp.collection
    assert None == stamp.achievements
    assert None == stamp.counter
    assert None == stamp.exception

    # Calling stamp() again will update the Timestamp.
    stamp2 = Timestamp.stamp(
        self._db, service, type, achievements="yay", counter=100, exception="boo"
    )
    assert stamp == stamp2
    now = utc_now()
    assert (now - stamp.finish).total_seconds() < 2
    assert stamp.start == stamp.finish
    assert service == stamp.service
    assert type == stamp.service_type
    assert None == stamp.collection
    assert "yay" == stamp.achievements
    assert 100 == stamp.counter
    assert "boo" == stamp.exception

    # Passing in a different collection will create a new Timestamp.
    stamp3 = Timestamp.stamp(
        self._db, service, type, collection=self._default_collection
    )
    assert stamp3 != stamp
    assert self._default_collection == stamp3.collection

    # Passing in CLEAR_VALUE for start, end, or exception will
    # clear an existing Timestamp.
    stamp4 = Timestamp.stamp(
        self._db,
        service,
        type,
        start=Timestamp.CLEAR_VALUE,
        finish=Timestamp.CLEAR_VALUE,
        exception=Timestamp.CLEAR_VALUE,
    )
    assert stamp4 == stamp
    assert None == stamp4.start
    assert None == stamp4.finish
    assert None == stamp4.exception
def test_isbns_updated_since(self):
    """Collection.isbns_updated_since returns cataloged ISBNs in
    CoverageRecord-timestamp order, honors an optional cutoff, and
    skips ISBNs that already have a Work.
    """
    i1 = self._identifier(identifier_type=Identifier.ISBN, foreign_id=self._isbn)
    i2 = self._identifier(identifier_type=Identifier.ISBN, foreign_id=self._isbn)
    i3 = self._identifier(identifier_type=Identifier.ISBN, foreign_id=self._isbn)
    i4 = self._identifier(identifier_type=Identifier.ISBN, foreign_id=self._isbn)
    timestamp = utc_now()

    # An empty catalog returns nothing..
    assert [] == self.collection.isbns_updated_since(self._db, None).all()

    # Give the ISBNs some coverage.
    content_cafe = DataSource.lookup(self._db, DataSource.CONTENT_CAFE)
    for isbn in [i2, i3, i1]:
        self._coverage_record(isbn, content_cafe)

    # Give one ISBN more than one coverage record.
    oclc = DataSource.lookup(self._db, DataSource.OCLC)
    i1_oclc_record = self._coverage_record(i1, oclc)

    def assert_isbns(expected, result_query):
        # Compare only the identifier column of each result row.
        results = [r[0] for r in result_query]
        assert expected == results

    # When no timestamp is given, all ISBNs in the catalog are returned,
    # in order of their CoverageRecord timestamp.
    self.collection.catalog_identifiers([i1, i2])
    updated_isbns = self.collection.isbns_updated_since(self._db, None).all()
    assert_isbns([i2, i1], updated_isbns)

    # That CoverageRecord timestamp is also returned.
    i1_timestamp = updated_isbns[1][1]
    assert isinstance(i1_timestamp, datetime.datetime)
    assert i1_oclc_record.timestamp == i1_timestamp

    # When a timestamp is passed, only works that have been updated since
    # then will be returned.
    timestamp = utc_now()
    i1.coverage_records[0].timestamp = utc_now()
    updated_isbns = self.collection.isbns_updated_since(self._db, timestamp)
    assert_isbns([i1], updated_isbns)

    # Prepare an ISBN associated with a Work.
    work = self._work(with_license_pool=True)
    work.license_pools[0].identifier = i2
    i2.coverage_records[0].timestamp = utc_now()

    # ISBNs that have a Work will be ignored.
    updated_isbns = self.collection.isbns_updated_since(self._db, timestamp)
    assert_isbns([i1], updated_isbns)
def test_cutoff(self):
    """Test that cutoff behaves correctly when given different values
    for ReaperMonitor.MAX_AGE.
    """
    monitor = MockReaperMonitor(self._db)

    # A numeric MAX_AGE is interpreted as a number of days.
    for days in (1, 1.5, -1):
        monitor.MAX_AGE = days
        expected = utc_now() - datetime.timedelta(days=days)
        self.time_eq(monitor.cutoff, expected)

    # A timedelta MAX_AGE is used as-is.
    monitor.MAX_AGE = datetime.timedelta(seconds=99)
    self.time_eq(monitor.cutoff, utc_now() - monitor.MAX_AGE)
def test__should_refresh(self):
    # Test the algorithm that tells whether a CachedFeed is stale.
    m = CachedFeed._should_refresh

    # If there's no CachedFeed, we must always refresh.
    assert True == m(None, object())

    class MockCachedFeed(object):
        # Minimal stand-in: _should_refresh only needs .timestamp here.
        def __init__(self, timestamp):
            self.timestamp = timestamp

    now = utc_now()

    # This feed was generated five minutes ago.
    five_minutes_old = MockCachedFeed(now - datetime.timedelta(minutes=5))

    # This feed was generated a thousand years ago.
    ancient = MockCachedFeed(now - datetime.timedelta(days=1000 * 365))

    # If we intend to cache forever, then even a thousand-year-old
    # feed shouldn't be refreshed.
    assert False == m(ancient, CachedFeed.CACHE_FOREVER)

    # Otherwise, it comes down to a date comparison.

    # If we're caching a feed for ten minutes, then the
    # five-minute-old feed should not be refreshed.
    assert False == m(five_minutes_old, 600)

    # If we're caching a feed for only a few seconds (or not at all),
    # then the five-minute-old feed should be refreshed.
    assert True == m(five_minutes_old, 0)
    assert True == m(five_minutes_old, 1)
def _navigation_feed(self, library, annotator, url_for=None):
    """Generate an OPDS feed for navigating the COPPA age gate.

    :param library: The Library whose lanes the gate leads to.
    :param annotator: Optional annotator; if present, its
        annotate_feed() is called to add library-level links (notably
        the authentication document link).
    :param url_for: Optional URL generator; defaults to cdn_url_for.
    :return: The populated OPDSFeed.
    """
    url_for = url_for or cdn_url_for
    base_url = url_for("index", library_short_name=library.short_name)

    # An entry for grown-ups.
    feed = OPDSFeed(title=library.name, url=base_url)
    opds = feed.feed
    yes_url = url_for(
        "acquisition_groups",
        library_short_name=library.short_name,
        lane_identifier=self.yes_lane_id,
    )
    opds.append(self.navigation_entry(yes_url, self.YES_TITLE, self.YES_CONTENT))

    # An entry for children.
    no_url = url_for(
        "acquisition_groups",
        library_short_name=library.short_name,
        lane_identifier=self.no_lane_id,
    )
    opds.append(self.navigation_entry(no_url, self.NO_TITLE, self.NO_CONTENT))

    # The gate tag is the thing that the SimplyE client actually uses.
    opds.append(self.gate_tag(self.URI, yes_url, no_url))

    # Add any other links associated with this library, notably
    # the link to its authentication document.
    if annotator:
        annotator.annotate_feed(feed, None)

    # Stamp the feed with its generation time.
    now = utc_now()
    opds.append(OPDSFeed.E.updated(OPDSFeed._strftime(now)))
    return feed
def test_items_that_need_coverage_respects_cutoff(self):
    """Verify that this coverage provider respects the cutoff_time
    argument.
    """
    edition, pool = self._edition(
        with_license_pool=True,
        collection=self.collection,
        identifier_type=Identifier.BIBLIOTHECA_ID,
    )
    # Pre-existing coverage for this identifier/operation/collection.
    cr = self._coverage_record(
        pool.identifier,
        self.provider.data_source,
        operation=self.provider.OPERATION,
        collection=self.collection,
    )

    # We have a coverage record already, so this book doesn't show
    # up in items_that_need_coverage
    items = self.provider.items_that_need_coverage().all()
    assert [] == items

    # But if we send a cutoff_time that's later than the time
    # associated with the coverage record...
    one_hour_from_now = utc_now() + datetime.timedelta(seconds=3600)
    provider_with_cutoff = self.create_provider(cutoff_time=one_hour_from_now)

    # The book starts showing up in items_that_need_coverage.
    assert [pool.identifier] == provider_with_cutoff.items_that_need_coverage().all()
def test_no_changes_means_no_timestamp_update(self):
    """If the metadata wrangler reports no updates, the monitor leaves
    (or clears) its Timestamp rather than advancing it.
    """
    before = utc_now()
    self.monitor.timestamp().finish = before

    # We're going to ask the metadata wrangler for updates, but
    # there will be none -- not even a feed-level update
    data = sample_data(
        "metadata_updates_empty_response_no_feed_timestamp.opds", "opds"
    )
    self.lookup.queue_response(
        200, {"content-type": OPDSFeed.ACQUISITION_FEED_TYPE}, data
    )
    new_timestamp = self.monitor.run_once(self.ts)

    # run_once() returned a TimestampData referencing the original
    # timestamp, and the Timestamp object was not updated.
    assert before == new_timestamp.finish
    assert before == self.monitor.timestamp().finish

    # If timestamp.finish is None before the update is run, and
    # there are no updates, the timestamp will be set
    # to None.
    self.monitor.timestamp().finish = None
    self.lookup.queue_response(
        200, {"content-type": OPDSFeed.ACQUISITION_FEED_TYPE}, data
    )
    new_timestamp = self.monitor.run_once(self.ts)
    assert Timestamp.CLEAR_VALUE == new_timestamp.finish
def test_credential_refresh_success(self):
    """Verify the process of refreshing the Odilo bearer token."""
    credential = self.api.credential_object(lambda x: x)
    assert "bearer token" == credential.credential
    assert self.api.token == credential.credential

    # Refresh with a mocked token response.
    self.api.access_token_response = self.api.mock_access_token_response(
        "new bearer token"
    )
    self.api.refresh_creds(credential)
    assert "new bearer token" == credential.credential
    assert self.api.token == credential.credential

    # By default, the access token's 'expiresIn' value is -1,
    # indicating that the token will never expire.
    #
    # To reflect this fact, credential.expires is set to None.
    assert None == credential.expires

    # But a token may specify a specific expiration time,
    # which is used to set a future value for credential.expires.
    self.api.access_token_response = self.api.mock_access_token_response(
        "new bearer token 2", 1000
    )
    self.api.refresh_creds(credential)
    assert "new bearer token 2" == credential.credential
    assert self.api.token == credential.credential
    assert credential.expires > utc_now()
def test_checkout(self):
    """Checking out yields a LoanInfo with the right collection,
    data source, and identifier, a start date of roughly now, and
    no end date.
    """
    patron = self._patron()
    data_source = DataSource.lookup(self._db, "Biblioboard", autocreate=True)
    edition, pool = self._edition(
        identifier_type=Identifier.URI,
        data_source_name=data_source.name,
        with_license_pool=True,
        collection=self.collection,
    )
    loan_info = self.api.checkout(
        patron, "1234", pool, Representation.EPUB_MEDIA_TYPE
    )
    assert self.collection.id == loan_info.collection_id
    assert data_source.name == loan_info.data_source_name
    assert Identifier.URI == loan_info.identifier_type
    assert pool.identifier.identifier == loan_info.identifier

    # The loan's start date has been set to the current time.
    # Use total_seconds() rather than .seconds, which ignores the
    # .days component and misbehaves on negative timedeltas.
    now = utc_now()
    assert (now - loan_info.start_date).total_seconds() < 2

    # The loan is of indefinite duration.
    assert None == loan_info.end_date
def needs_external_sync(cls, patron):
    """Could this patron stand to have their metadata synced with the
    remote?

    By default, all patrons get synced once every twelve hours. Patrons
    who lack borrowing privileges can always stand to be synced, since
    their privileges may have just been restored.
    """
    if not patron.last_external_sync:
        # This patron has never been synced.
        return True

    if cls.has_borrowing_privileges(patron):
        # A patron who has borrowing privileges gets synced every twelve
        # hours. Their account is unlikely to change rapidly.
        interval = Patron.MAX_SYNC_TIME
    else:
        # A patron without borrowing privileges might get synced
        # every time they make a request. It's likely they are
        # taking action to get their account reinstated and we
        # don't want to make them wait twelve hours to get access.
        interval = datetime.timedelta(seconds=5)

    # Sync is needed once the interval since the last sync has passed.
    return utc_now() > patron.last_external_sync + interval
def process_book(self, bibliographic):
    """Make the local database reflect the state of the remote
    Enki collection for the given book.

    :param bibliographic: A Metadata object with attached CirculationData

    :return: A 2-tuple (Edition, LicensePool). If possible, a
        presentation-ready Work will be created for the LicensePool.
    """
    availability = bibliographic.circulation
    edition, new_edition = bibliographic.edition(self._db)
    now = utc_now()
    # Replace subjects, contributions and formats wholesale, but keep
    # existing identifiers.
    policy = ReplacementPolicy(
        identifiers=False,
        subjects=True,
        contributions=True,
        formats=True,
    )
    bibliographic.apply(edition, self.collection, replace=policy)
    license_pool, ignore = availability.license_pool(self._db, self.collection)

    if new_edition:
        # A brand-new title: record a DISTRIBUTOR_TITLE_ADD event for
        # every library that uses this collection.
        for library in self.collection.libraries:
            self.analytics.collect_event(
                library, license_pool, CirculationEvent.DISTRIBUTOR_TITLE_ADD, now
            )

    return edition, license_pool
def test_monitor_lifecycle(self):
    """Run a Monitor through a full run() cycle and verify the
    Timestamp bookkeeping it performs.
    """
    monitor = MockMonitor(self._db, self._default_collection)
    monitor.default_start_time = datetime_utc(2010, 1, 1)

    # There is no timestamp for this monitor.
    def get_timestamp():
        return get_one(self._db, Timestamp, service=monitor.service_name)

    assert None == get_timestamp()

    # Run the monitor.
    monitor.run()

    # The monitor ran once and then stopped.
    [progress] = monitor.run_records

    # The TimestampData passed in to run_once() had the
    # Monitor's default start time as its .start, and an empty
    # time for .finish.
    assert monitor.default_start_time == progress.start
    assert None == progress.finish

    # But the Monitor's underlying timestamp has been updated with
    # the time that the monitor actually took to run.
    timestamp = get_timestamp()
    assert timestamp.start > monitor.default_start_time
    assert timestamp.finish > timestamp.start
    self.time_eq(utc_now(), timestamp.start)

    # cleanup() was called once.
    assert [True] == monitor.cleanup_records
def test_run_once(self):
    """CredentialReaper.run_once deletes expired Credentials (in
    batches) and leaves unexpired ones alone.
    """
    # Create four Credentials: two expired, two valid.
    expired1 = self._credential()
    expired2 = self._credential()
    now = utc_now()
    expiration_date = now - datetime.timedelta(days=CredentialReaper.MAX_AGE + 1)
    for e in [expired1, expired2]:
        e.expires = expiration_date

    # This Credential is just shy of the expiration cutoff.
    active = self._credential()
    active.expires = now - datetime.timedelta(days=CredentialReaper.MAX_AGE - 1)

    # This Credential has no expiration date at all.
    eternal = self._credential()

    m = CredentialReaper(self._db)

    # Set the batch size to 1 to make sure this works even
    # when there are multiple batches.
    m.BATCH_SIZE = 1

    assert "Reaper for Credential.expires" == m.SERVICE_NAME
    result = m.run_once()
    assert "Items deleted: 2" == result.achievements

    # The expired credentials have been reaped; the others
    # are still in the database.
    remaining = set(self._db.query(Credential).all())
    assert set([active, eternal]) == remaining
def test_subclass_cannot_modify_dates(self):
    """The subclass can modify some fields of the TimestampData passed
    in to it, but it can't modify the start or end dates.

    If you want that, you shouldn't subclass TimelineMonitor.
    """

    class Mock(TimelineMonitor):
        DEFAULT_START_TIME = Monitor.NEVER
        SERVICE_NAME = "I aim to misbehave"

        def catch_up_from(self, start, cutoff, progress):
            # Try (and, per the contract, fail) to override the
            # timeline fields; the other fields should stick.
            progress.start = 1
            progress.finish = 2
            progress.counter = 3
            progress.achievements = 4

    m = Mock(self._db)
    progress = m.timestamp().to_data()
    m.run_once(progress)
    now = utc_now()

    # The timestamp values have been set to appropriate values for
    # the portion of the timeline covered, overriding our values.
    assert None == progress.start
    self.time_eq(now, progress.finish)

    # The non-timestamp values have been left alone.
    assert 3 == progress.counter
    assert 4 == progress.achievements
def test_availability_needs_update(self):
    """Test the logic that controls whether a LicensePool's availability
    information should actually be updated.
    """
    identifier = IdentifierData(Identifier.GUTENBERG_ID, "1")
    now = utc_now()
    yesterday = now - datetime.timedelta(days=1)
    recent_data = CirculationData(DataSource.GUTENBERG, identifier)
    # CirculationData.last_checked defaults to the current time.
    assert (recent_data.last_checked - now).total_seconds() < 10
    old_data = CirculationData(
        DataSource.GUTENBERG, identifier, last_checked=yesterday
    )

    edition, pool = self._edition(with_license_pool=True)

    # A pool that has never been checked always needs to be updated.
    pool.last_checked = None
    assert True == recent_data._availability_needs_update(pool)
    assert True == old_data._availability_needs_update(pool)

    # A pool that has been checked before only needs to be updated
    # if the information is at least as new as what we had before.
    pool.last_checked = now
    assert True == recent_data._availability_needs_update(pool)
    assert False == old_data._availability_needs_update(pool)
def test_fulfil(self):
    """LCPAPI.fulfill returns an LCPFulfilmentInfo describing an
    existing loan, fetching the license from the (mocked) LCP server.
    """
    # Arrange
    lcp_api = LCPAPI(self._db, self._lcp_collection)
    patron = self._patron()
    days = self._lcp_collection.default_loan_period(patron.library)
    today = utc_now()
    expires = today + datetime.timedelta(days=days)
    data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
    data_source_name = data_source.name
    license_pool = self._licensepool(
        edition=None,
        data_source_name=data_source_name,
        collection=self._lcp_collection,
    )
    lcp_license = json.loads(fixtures.LCPSERVER_LICENSE)
    # The LCP server is mocked out entirely; only get_license is used.
    lcp_server_mock = create_autospec(spec=LCPServer)
    lcp_server_mock.get_license = MagicMock(return_value=lcp_license)

    with self._configuration_factory.create(
        self._configuration_storage, self._db, LCPServerConfiguration
    ) as configuration:
        with patch("api.lcp.collection.LCPServer") as lcp_server_constructor:
            lcp_server_constructor.return_value = lcp_server_mock

            configuration.lcpserver_url = fixtures.LCPSERVER_URL
            configuration.lcpserver_user = fixtures.LCPSERVER_USER
            configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
            configuration.lcpserver_input_directory = (
                fixtures.LCPSERVER_INPUT_DIRECTORY
            )
            configuration.provider_name = fixtures.PROVIDER_NAME
            configuration.passphrase_hint = fixtures.TEXT_HINT
            configuration.encryption_algorithm = (
                LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
            )

            # Act
            # The loan's external identifier must match the license id
            # so fulfill() can find the license.
            license_pool.loan_to(
                patron,
                start=today,
                end=expires,
                external_identifier=lcp_license["id"],
            )
            fulfilment_info = lcp_api.fulfill(
                patron, "pin", license_pool, "internal format"
            )

            # Assert
            assert isinstance(fulfilment_info, LCPFulfilmentInfo) == True
            assert fulfilment_info.collection_id == self._lcp_collection.id
            assert fulfilment_info.collection(self._db) == self._lcp_collection
            assert fulfilment_info.license_pool(self._db) == license_pool
            assert fulfilment_info.data_source_name == data_source_name
            assert fulfilment_info.identifier_type == license_pool.identifier.type

            lcp_server_mock.get_license.assert_called_once_with(
                self._db, lcp_license["id"], patron
            )
def test_where_clause(self):
    """IdlingAnnotationReaper.where_clause matches only old 'idling'
    annotations whose patron has no active loan or hold on the book.
    """
    # Two books.
    ignore, lp1 = self._edition(with_license_pool=True)
    ignore, lp2 = self._edition(with_license_pool=True)

    # Two patrons who sync their annotations.
    p1 = self._patron()
    p2 = self._patron()
    for p in [p1, p2]:
        p.synchronize_annotations = True
    now = utc_now()
    not_that_old = now - datetime.timedelta(days=59)
    very_old = now - datetime.timedelta(days=61)

    def _annotation(
        patron, pool, content, motivation=Annotation.IDLING, timestamp=very_old
    ):
        # Helper: create an annotation with a given age and motivation.
        annotation, ignore = Annotation.get_one_or_create(
            self._db,
            patron=patron,
            identifier=pool.identifier,
            motivation=motivation,
        )
        annotation.timestamp = timestamp
        annotation.content = content
        return annotation

    # The first patron will not be affected by the
    # reaper. Although their annotations are very old, they have
    # an active loan for one book and a hold on the other.
    loan = lp1.loan_to(p1)
    old_loan = _annotation(p1, lp1, "old loan")
    hold = lp2.on_hold_to(p1)
    old_hold = _annotation(p1, lp2, "old hold")

    # The second patron has a very old annotation for the first
    # book. This is the only annotation that will be affected by
    # the reaper.
    reapable = _annotation(p2, lp1, "abandoned")

    # The second patron also has a very old non-idling annotation
    # for the first book, which will not be reaped because only
    # idling annotations are reaped.
    not_idling = _annotation(
        p2, lp1, "not idling", motivation="some other motivation"
    )

    # The second patron has a non-old idling annotation for the
    # second book, which will not be reaped (even though there is
    # no active loan or hold) because it's not old enough.
    new_idling = _annotation(p2, lp2, "recent", timestamp=not_that_old)

    reaper = IdlingAnnotationReaper(self._db)
    qu = self._db.query(Annotation).filter(reaper.where_clause)
    assert [reapable] == qu.all()
def _minutes_since(cls, since):
    """How many minutes have elapsed since `since`?

    This is a helper method to create the `minutes` parameter to
    the API.
    """
    elapsed = utc_now() - since
    return int(elapsed.total_seconds() / 60)
def test_register_resolved(self):
    """Registering a complaint with resolved= set stamps both
    .timestamp and .resolved; a second complaint from the same source
    is a separate record.
    """
    complaint, is_new = Complaint.register(
        self.pool, self.type, "foo", "bar", resolved=utc_now()
    )
    assert True == is_new
    assert self.type == complaint.type
    assert "foo" == complaint.source
    assert "bar" == complaint.detail
    # Use total_seconds() rather than .seconds: .seconds ignores the
    # .days component of a timedelta, so a wildly wrong timestamp
    # could still pass the check.
    assert abs(utc_now() - complaint.timestamp).total_seconds() < 3
    assert abs(utc_now() - complaint.resolved).total_seconds() < 3

    # A second complaint from the same source is not folded into the same complaint.
    complaint2, is_new = Complaint.register(self.pool, self.type, "foo", "baz")
    assert True == is_new
    assert complaint2.id != complaint.id
    assert "baz" == complaint2.detail
    assert 2 == len(self.pool.complaints)
def test_checkout_with_existing_loan(self):
    """When the patron already has a loan, LCPAPI.checkout reuses its
    external identifier instead of creating a new one.
    """
    # Arrange
    lcp_api = LCPAPI(self._db, self._lcp_collection)
    patron = self._patron()
    days = self._lcp_collection.default_loan_period(patron.library)
    start_date = utc_now()
    end_date = start_date + datetime.timedelta(days=days)
    data_source = DataSource.lookup(self._db, DataSource.LCP, autocreate=True)
    data_source_name = data_source.name
    edition = self._edition(
        data_source_name=data_source_name, identifier_id=fixtures.CONTENT_ID
    )
    license_pool = self._licensepool(
        edition=edition,
        data_source_name=data_source_name,
        collection=self._lcp_collection,
    )
    lcp_license = json.loads(fixtures.LCPSERVER_LICENSE)
    # The LCP server is mocked out entirely; only get_license is used.
    lcp_server_mock = create_autospec(spec=LCPServer)
    lcp_server_mock.get_license = MagicMock(return_value=lcp_license)
    loan_identifier = "e99be177-4902-426a-9b96-0872ae877e2f"

    # The pre-existing loan whose identifier checkout() should reuse.
    license_pool.loan_to(patron, external_identifier=loan_identifier)

    with self._configuration_factory.create(
        self._configuration_storage, self._db, LCPServerConfiguration
    ) as configuration:
        with patch("api.lcp.collection.LCPServer") as lcp_server_constructor:
            lcp_server_constructor.return_value = lcp_server_mock

            configuration.lcpserver_url = fixtures.LCPSERVER_URL
            configuration.lcpserver_user = fixtures.LCPSERVER_USER
            configuration.lcpserver_password = fixtures.LCPSERVER_PASSWORD
            configuration.lcpserver_input_directory = (
                fixtures.LCPSERVER_INPUT_DIRECTORY
            )
            configuration.provider_name = fixtures.PROVIDER_NAME
            configuration.passphrase_hint = fixtures.TEXT_HINT
            configuration.encryption_algorithm = (
                LCPServerConfiguration.DEFAULT_ENCRYPTION_ALGORITHM
            )

            # Act
            loan = lcp_api.checkout(patron, "pin", license_pool, "internal format")

            # Assert
            assert loan.collection_id == self._lcp_collection.id
            assert loan.collection(self._db) == self._lcp_collection
            assert loan.license_pool(self._db) == license_pool
            assert loan.data_source_name == data_source_name
            assert loan.identifier_type == license_pool.identifier.type
            assert loan.external_identifier == loan_identifier
            # NOTE(review): these compare exact datetimes against values
            # computed at the top of the test; presumably checkout()
            # derives the loan dates deterministically from the same
            # inputs -- confirm against LCPAPI.checkout.
            assert loan.start_date == start_date
            assert loan.end_date == end_date

            lcp_server_mock.get_license.assert_called_once_with(
                self._db, loan_identifier, patron
            )
def catch_up_from(self, start, cutoff, progress):
    """Find Odilo books that changed recently.

    :progress: A TimestampData representing the time previously
        covered by this Monitor.
    """
    self.log.info(
        "Starting recently_changed_ids, start: "
        + str(start)
        + ", cutoff: "
        + str(cutoff)
    )

    started_at = utc_now()
    updated, new = self.all_ids(start)
    elapsed = utc_now() - started_at
    self.log.info("recently_changed_ids finished in: " + str(elapsed))

    # Record what this run accomplished.
    progress.achievements = "Updated records: %d. New records: %d." % (updated, new)
def test_holds_not_allowed(self):
    """Placing a hold when the library has holds disabled raises
    PolicyException.
    """
    patron = self._patron()
    edition = self._edition()
    pool = self._licensepool(edition)

    # Turn holds off for the library, then try to place one.
    self._default_library.setting(Library.ALLOW_HOLDS).value = False
    with pytest.raises(PolicyException) as excinfo:
        pool.on_hold_to(patron, utc_now(), 4)
    assert "Holds are disabled for this library." in str(excinfo.value)
class MockMilleniumPatronAPI(MilleniumPatronAPI): """This mocks the API on a higher level than the HTTP level. It is not used in the tests of the MilleniumPatronAPI class. It is used in the Adobe Vendor ID tests but maybe it shouldn't. """ # For expiration dates we're using UTC instead of local time for # convenience; the difference doesn't matter because the dates in # question are at least 10 days away from the current date. # This user's card has expired. user1 = PatronData( permanent_id="12345", authorization_identifier="0", username="******", authorization_expires=datetime_utc(2015, 4, 1), ) # This user's card still has ten days on it. the_future = utc_now() + datetime.timedelta(days=10) user2 = PatronData( permanent_id="67890", authorization_identifier="5", username="******", authorization_expires=the_future, ) users = [user1, user2] def __init__(self): pass def remote_authenticate(self, barcode, pin): """A barcode that's 14 digits long is treated as valid, no matter which PIN is used. That's so real barcode/PIN combos can be passed through to third parties. Otherwise, valid test PIN is the first character of the barcode repeated four times. """ u = self.dump(barcode) if "ERRNUM" in u: return False return len(barcode) == 14 or pin == barcode[0] * 4 def remote_patron_lookup(self, patron_or_patrondata): # We have a couple custom barcodes. look_for = patron_or_patrondata.authorization_identifier for u in self.users: if u.authorization_identifier == look_for: return u return None
def _update_credential(credential, odilo_data):
    """Copy Odilo OAuth data into a Credential object."""
    credential.credential = odilo_data["token"]

    expires_in = odilo_data["expiresIn"]
    if expires_in == -1:
        # This token never expires.
        credential.expires = None
        return

    # Scale the advertised lifetime by 0.9 -- presumably so we treat
    # the token as expired slightly before the remote side does.
    credential.expires = utc_now() + datetime.timedelta(seconds=expires_in * 0.9)
def _event_data(self, **kwargs):
    """Build a circulation-event kwargs dict, filling in defaults and
    deriving `delta` when both old and new values are present.
    """
    defaults = (
        ("source", DataSource.OVERDRIVE),
        ("id_type", Identifier.OVERDRIVE_ID),
        ("start", utc_now()),
        ("type", CirculationEvent.DISTRIBUTOR_LICENSE_ADD),
    )
    for key, value in defaults:
        kwargs.setdefault(key, value)

    if "old_value" in kwargs and "new_value" in kwargs:
        kwargs["delta"] = kwargs["new_value"] - kwargs["old_value"]

    return kwargs