    def test_update_new_licensepool(self):
        data, raw = self.sample_json("overdrive_availability_information.json")

        # Create an identifier
        identifier = self._identifier(
            identifier_type=Identifier.OVERDRIVE_ID
        )

        # Make it look like the availability information is for the
        # newly created Identifier.
        raw['id'] = identifier.identifier

        pool, was_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE,
            identifier.type, identifier.identifier,
            collection=self.collection
        )

        pool, was_new, changed = self.api.update_licensepool_with_book_info(
            raw, pool, was_new
        )
        eq_(True, was_new)
        eq_(True, changed)

        self._db.commit()

        eq_(raw['copiesOwned'], pool.licenses_owned)
        eq_(raw['copiesAvailable'], pool.licenses_available)
        eq_(0, pool.licenses_reserved)
        eq_(raw['numberOfHolds'], pool.patrons_in_hold_queue)
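
        # The assertions above touch only a few fields of the
        # "overdrive_availability_information.json" fixture. A minimal sketch
        # of the shape they assume (values are illustrative, not taken from
        # the real fixture):
        #
        #     {
        #         "id": "<overdrive id>",
        #         "copiesOwned": 5,
        #         "copiesAvailable": 3,
        #         "numberOfHolds": 0
        #     }
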
    def test_update_new_licensepool_when_same_book_has_pool_in_different_collection(self):
        old_edition, old_pool = self._edition(
            data_source_name=DataSource.OVERDRIVE,
            identifier_type=Identifier.OVERDRIVE_ID,
            with_license_pool=True,
        )
        old_pool.calculate_work()
        collection = self._collection()

        data, raw = self.sample_json("overdrive_availability_information.json")

        # Make it look like the availability information is for the
        # old pool's Identifier.
        identifier = old_pool.identifier
        raw['id'] = identifier.identifier

        new_pool, was_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE,
            identifier.type, identifier.identifier,
            collection=collection
        )
        # The new pool doesn't have a presentation edition yet,
        # but it will be updated to share the old pool's edition.
        eq_(None, new_pool.presentation_edition)

        new_pool, was_new, changed = self.api.update_licensepool_with_book_info(
            raw, new_pool, was_new
        )
        eq_(True, was_new)
        eq_(True, changed)
        eq_(old_edition, new_pool.presentation_edition)
        eq_(old_pool.work, new_pool.work)
    def test_update_new_licensepool(self):
        data, raw = self.sample_json("overdrive_availability_information.json")

        # Create an identifier
        identifier = self._identifier(
            identifier_type=Identifier.OVERDRIVE_ID
        )

        # Make it look like the availability information is for the
        # newly created Identifier.
        raw['id'] = identifier.identifier

        api = DummyOverdriveAPI(self._db)
        pool, was_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE, 
            identifier.type, identifier.identifier
        )
        pool, was_new, changed = api.update_licensepool_with_book_info(
            raw, pool, was_new
        )
        eq_(True, was_new)
        eq_(True, changed)

        # The title of the corresponding Edition has been filled
        # in, just to provide some basic human-readable metadata.
        self._db.commit()
        eq_("Blah blah blah", pool.edition.title)
        eq_(raw['copiesOwned'], pool.licenses_owned)
        eq_(raw['copiesAvailable'], pool.licenses_available)
        eq_(0, pool.licenses_reserved)
        eq_(raw['numberOfHolds'], pool.patrons_in_hold_queue)
    def test_finalize_edition(self):

        provider_no_presentation_ready = self._provider(presentation_ready_on_success=False)
        provider_presentation_ready = self._provider(presentation_ready_on_success=True)
        identifier = self._identifier()
        source = DataSource.lookup(self._db, DataSource.GUTENBERG)

        # Here's an Edition with no LicensePool.
        edition, is_new = Edition.for_foreign_id(
            self._db, source, identifier.type, identifier.identifier
        )
        edition.title = self._str

        # This will effectively do nothing.
        provider_no_presentation_ready.finalize_edition(edition)

        # No Works have been created.
        eq_(0, self._db.query(Work).count())

        # But if there's also a LicensePool...
        pool, is_new = LicensePool.for_foreign_id(
            self._db, source, identifier.type, identifier.identifier
        )

        # finalize_edition() will create a Work.
        provider_no_presentation_ready.finalize_edition(edition)

        work = pool.work
        eq_(work, edition.work)
        eq_(False, work.presentation_ready)

        # If the provider is configured to do so, finalize_edition()
        # will also set the Work as presentation-ready.
        provider_presentation_ready.finalize_edition(edition)
        eq_(True, work.presentation_ready)
    def process_batch(self, identifiers):
        identifiers_by_threem_id = dict()
        threem_ids = set()
        for identifier in identifiers:
            threem_ids.add(identifier.identifier)
            identifiers_by_threem_id[identifier.identifier] = identifier

        identifiers_not_mentioned_by_threem = set(identifiers)
        now = datetime.datetime.utcnow()

        for circ in self.api.get_circulation_for(threem_ids):
            if not circ:
                continue
            threem_id = circ[Identifier][Identifier.THREEM_ID]
            identifier = identifiers_by_threem_id[threem_id]
            identifiers_not_mentioned_by_threem.remove(identifier)

            pool = identifier.licensed_through
            if not pool:
                # We don't have a license pool for this work. That
                # shouldn't happen--how did we know about the
                # identifier?--but it shouldn't be a big deal to
                # create one.
                pool, ignore = LicensePool.for_foreign_id(
                    self._db, self.data_source, identifier.type,
                    identifier.identifier)

                # 3M books are never open-access.
                pool.open_access = False
                CirculationEvent.log(
                    self._db, pool, CirculationEvent.TITLE_ADD,
                    None, None, start=now)

            self.api.apply_circulation_information_to_licensepool(circ, pool)

        # At this point there may be some license pools left over
        # that 3M doesn't know about.  This is a pretty reliable
        # indication that we no longer own any licenses to the
        # book.
        for identifier in identifiers_not_mentioned_by_threem:
            pool = identifier.licensed_through
            if not pool:
                continue
            if pool.licenses_owned > 0:
                if pool.presentation_edition:
                    self.log.warn("Removing %s (%s) from circulation",
                                  pool.presentation_edition.title, pool.presentation_edition.author)
                else:
                    self.log.warn(
                        "Removing unknown work %s from circulation.",
                        identifier.identifier
                    )
            pool.licenses_owned = 0
            pool.licenses_available = 0
            pool.licenses_reserved = 0
            pool.patrons_in_hold_queue = 0
            pool.last_checked = now
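
        # Illustrative note on the `circ[Identifier][Identifier.THREEM_ID]`
        # lookup above: each `circ` record is assumed to be a dict in which
        # the Identifier class itself is a key whose value maps identifier
        # types to identifier values, roughly:
        #
        #     circ = {Identifier: {Identifier.THREEM_ID: "a3m4id"}, ...}
        #
        # with the rest of the record holding the circulation counts consumed
        # by apply_circulation_information_to_licensepool().
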
    def test_isbn_covers_are_imported_from_mapped_identifiers(self):
        # Now that we pass ISBN equivalents instead of Bibliotheca identifiers
        # to the Metadata Wrangler, they're not getting covers. Let's confirm
        # that the problem isn't on the Circulation Manager import side of things.

        # Create a Bibliotheca identifier with a license pool.
        source = DataSource.lookup(self._db, DataSource.BIBLIOTHECA)
        identifier = self._identifier(identifier_type=Identifier.BIBLIOTHECA_ID)
        LicensePool.for_foreign_id(
            self._db, source, identifier.type, identifier.identifier,
            collection=self.provider.collection
        )

        # Create an ISBN and set it equivalent.
        isbn = self._identifier(identifier_type=Identifier.ISBN)
        isbn.identifier = '9781594632556'
        identifier.equivalent_to(source, isbn, 1)

        opds = sample_data('metadata_isbn_response.opds', 'opds')
        self.provider.lookup_client.queue_response(
            200, {'content-type': 'application/atom+xml;profile=opds-catalog;kind=acquisition'}, opds
        )

        result = self.provider.process_item(identifier)
        # The lookup is successful
        eq_(result, identifier)
        # The appropriate cover links are transferred.
        identifier_uris = [l.resource.url for l in identifier.links
                           if l.rel in [Hyperlink.IMAGE, Hyperlink.THUMBNAIL_IMAGE]]
        expected = [
            'http://book-covers.nypl.org/Content%20Cafe/ISBN/9781594632556/cover.jpg',
            'http://book-covers.nypl.org/scaled/300/Content%20Cafe/ISBN/9781594632556/cover.jpg'
        ]

        eq_(sorted(identifier_uris), sorted(expected))

        # The ISBN doesn't get any information.
        eq_(isbn.links, [])
    def handle_event(self, threem_id, isbn, foreign_patron_id,
                     start_time, end_time, internal_event_type):
        # Find or lookup the LicensePool for this event.
        license_pool, is_new = LicensePool.for_foreign_id(
            self._db, self.api.source, Identifier.THREEM_ID, threem_id)

        if is_new:
            # Immediately acquire bibliographic coverage for this book.
            # This will set the DistributionMechanisms and make the
            # book presentation-ready. However, its circulation information
            # might not be up to date until we process some more events.
            record = self.bibliographic_coverage_provider.ensure_coverage(
                license_pool.identifier, force=True
            )

        threem_identifier = license_pool.identifier
        isbn, ignore = Identifier.for_foreign_id(
            self._db, Identifier.ISBN, isbn)

        edition, ignore = Edition.for_foreign_id(
            self._db, self.api.source, Identifier.THREEM_ID, threem_id)

        # The ISBN and the 3M identifier are exactly equivalent.
        threem_identifier.equivalent_to(self.api.source, isbn, strength=1)

        # Log the event.
        event, was_new = get_one_or_create(
            self._db, CirculationEvent, license_pool=license_pool,
            type=internal_event_type, start=start_time,
            foreign_patron_id=foreign_patron_id,
            create_method_kwargs=dict(delta=1,end=end_time)
            )

        # If this is our first time seeing this LicensePool, log its
        # occurrence as a separate event.
        if is_new:
            event = get_one_or_create(
                self._db, CirculationEvent,
                type=CirculationEvent.TITLE_ADD,
                license_pool=license_pool,
                create_method_kwargs=dict(
                    start=license_pool.last_checked or start_time,
                    delta=1,
                    end=license_pool.last_checked or end_time,
                )
            )
        title = edition.title or "[no title]"
        self.log.info("%r %s: %s", start_time, title, internal_event_type)
        return start_time
    def test_update_licensepool_with_holds(self):
        data, raw = self.sample_json("overdrive_availability_information_holds.json")
        identifier = self._identifier(
            identifier_type=Identifier.OVERDRIVE_ID
        )
        raw['id'] = identifier.identifier

        license_pool, is_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE, identifier.type,
            identifier.identifier, collection=self._default_collection
        )
        pool, was_new, changed = self.api.update_licensepool_with_book_info(
            raw, license_pool, is_new
        )
        eq_(10, pool.patrons_in_hold_queue)
        eq_(True, changed)
    def test_generate_edition(self):
        # Create an ISBN with a LicensePool.
        identifier = self._identifier(identifier_type=Identifier.ISBN)
        lp = LicensePool.for_foreign_id(
            self._db, self.provider.data_source, identifier.type,
            identifier.identifier, collection=self._default_collection
        )[0]

        # Create editions and equivalencies for some OCLC equivalent identifiers.
        number_ed = self._edition(identifier_type=Identifier.OCLC_NUMBER)
        work_id_ed = self._edition(identifier_type=Identifier.OCLC_WORK)

        identifier.equivalent_to(
            self.provider.data_source, number_ed.primary_identifier, 1
        )
        identifier.equivalent_to(
            self.provider.data_source, work_id_ed.primary_identifier, 1
        )
        self._db.commit()

        number_ed_info = (number_ed.title, number_ed.author)
        work_id_ed_info = (work_id_ed.title, work_id_ed.author)

        def presentation_edition_info():
            return (lp.presentation_edition.title, lp.presentation_edition.author)

        # generate_edition sets a presentation_edition
        self.provider.generate_edition(identifier)
        assert presentation_edition_info() in [number_ed_info, work_id_ed_info]

        # (Remove the generated presentation_edition for the next portion of the test.)
        combined_edition = lp.presentation_edition
        lp.presentation_edition = None
        for contribution in combined_edition.contributions:
            self._db.delete(contribution)
        self._db.delete(combined_edition)

        # When only one edition has a title and author, that edition becomes
        # the presentation edition.
        for contribution in work_id_ed.contributions:
            work_id_ed.author = None
            self._db.delete(contribution)
        self._db.commit()

        self.provider.generate_edition(identifier)
        eq_(number_ed_info, presentation_edition_info())
    def complaints(cls, library, title, url, annotator, pagination=None):
        _db = Session.object_session(library)
        facets = Facets.default(library)
        pagination = pagination or Pagination.default()

        q = LicensePool.with_complaint(library)
        results = pagination.apply(q).all()

        if len(results) > 0:
            (pools, counts) = zip(*results)
        else:
            pools = ()

        works = [pool.work for pool in pools]
        feed = cls(_db, title, url, works, annotator)

        # Render a 'start' link
        top_level_title = annotator.top_level_title()
        start_uri = annotator.groups_url(None)
        AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="start", title=top_level_title)

        # Render an 'up' link, same as the 'start' link to indicate top-level feed
        AdminFeed.add_link_to_feed(feed.feed, href=start_uri, rel="up", title=top_level_title)

        if len(works) > 0:
            # There are works in this list. Add a 'next' link.
            AdminFeed.add_link_to_feed(feed.feed, rel="next", href=annotator.complaints_url(facets, pagination.next_page))

        if pagination.offset > 0:
            AdminFeed.add_link_to_feed(feed.feed, rel="first", href=annotator.complaints_url(facets, pagination.first_page))

        previous_page = pagination.previous_page
        if previous_page:
            AdminFeed.add_link_to_feed(feed.feed, rel="previous", href=annotator.complaints_url(facets, previous_page))

        annotator.annotate_feed(feed)
        return unicode(feed)
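
    # Hedged usage sketch for the classmethod above. Every name here
    # (ComplaintsFeed, library, url, annotator) is illustrative rather than
    # taken from this code:
    #
    #     feed_xml = ComplaintsFeed.complaints(
    #         library, "Titles with complaints", url, annotator,
    #         pagination=Pagination.default(),
    #     )
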
    def process_batch(self, identifiers):
        identifiers_by_bibliotheca_id = dict()
        bibliotheca_ids = set()
        for identifier in identifiers:
            bibliotheca_ids.add(identifier.identifier)
            identifiers_by_bibliotheca_id[identifier.identifier] = identifier

        identifiers_not_mentioned_by_bibliotheca = set(identifiers)
        now = datetime.datetime.utcnow()

        collection = self.api.collection
        for circ in self.api.get_circulation_for(bibliotheca_ids):
            if not circ:
                continue
            bibliotheca_id = circ[Identifier][Identifier.BIBLIOTHECA_ID]
            identifier = identifiers_by_bibliotheca_id[bibliotheca_id]
            identifiers_not_mentioned_by_bibliotheca.remove(identifier)
            pools = [lp for lp in identifier.licensed_through
                     if lp.data_source.name==DataSource.BIBLIOTHECA
                     and lp.collection == collection]
            if not pools:
                # We don't have a license pool for this work. That
                # shouldn't happen--how did we know about the
                # identifier?--but it shouldn't be a big deal to
                # create one.
                pool, ignore = LicensePool.for_foreign_id(
                    self._db, self.data_source, identifier.type,
                    identifier.identifier, collection=collection
                )

                # Bibliotheca books are never open-access.
                pool.open_access = False
                self.analytics.collect_event(
                    self._db, pool, CirculationEvent.DISTRIBUTOR_TITLE_ADD, now)
            else:
                [pool] = pools
                
            self.api.apply_circulation_information_to_licensepool(circ, pool, self.analytics)

        # At this point there may be some license pools left over
        # that Bibliotheca doesn't know about.  This is a pretty reliable
        # indication that we no longer own any licenses to the
        # book.
        for identifier in identifiers_not_mentioned_by_bibliotheca:
            pools = [lp for lp in identifier.licensed_through
                     if lp.data_source.name==DataSource.BIBLIOTHECA
                     and lp.collection == collection]
            if not pools:
                continue
            for pool in pools:
                if pool.licenses_owned > 0:
                    if pool.presentation_edition:
                        self.log.warn("Removing %s (%s) from circulation",
                                      pool.presentation_edition.title, pool.presentation_edition.author)
                    else:
                        self.log.warn(
                            "Removing unknown work %s from circulation.",
                            identifier.identifier
                        )
                pool.update_availability(0, 0, 0, 0, self.analytics)
                pool.last_checked = now
        try:
            status_code, headers, content = self.get(circulation_link, {})
        except Exception as e:
            status_code = None
            self.log.error(
                "HTTP exception communicating with Overdrive",
                exc_info=e
            )
        if status_code != 200:
            self.log.error(
                "Could not get availability for %s: status code %s",
                book['id'], status_code
            )
            return None, None, False

        book.update(json.loads(content))
        license_pool, is_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE, Identifier.OVERDRIVE_ID, book_id)
        return self.update_licensepool_with_book_info(
            book, license_pool, is_new
        )

    def update_licensepool_with_book_info(self, book, license_pool, is_new):
        """Update a book's LicensePool with information from a JSON
        representation of its circulation info.

        Also creates an Edition and gives it very basic bibliographic
        information (the title), if possible.
        """
        circulation = OverdriveRepresentationExtractor.book_info_to_circulation(
            book
        )
        circulation_changed = circulation.update(license_pool, is_new)
    def update_licensepool_for_identifier(
            self, isbn, availability, medium, policy=None
    ):
        """Update availability information for a single book.

        If the book has never been seen before, a new LicensePool
        will be created for the book.

        The book's LicensePool will be updated with current approximate 
        circulation information (we can tell if it's available, but 
        not how many copies). 
        Bibliographic coverage will be ensured for the OneClick Identifier. 
        Work will be created for the LicensePool and set as presentation-ready.

        :param isbn: The identifier OneClick uses.
        :param availability: Boolean denoting whether the book can be lent to patrons.
        :param medium: The name OneClick uses for the book's medium.
        """

        # find a license pool to match the isbn, and see if it'll need a metadata update later
        license_pool, is_new_pool = LicensePool.for_foreign_id(
            self._db, DataSource.RB_DIGITAL, Identifier.RB_DIGITAL_ID, isbn,
            collection=self.collection
        )
        if is_new_pool:
            # This is the first time we've seen this book. Make sure its
            # identifier has bibliographic coverage.
            self.bibliographic_coverage_provider.ensure_coverage(
                license_pool.identifier
            )

        # now tell the licensepool if it's lendable

        # We don't know exactly how many licenses are available, but
        # we know that it's either zero (book is not lendable) or greater
        # than zero (book is lendable)
        licenses_available = 1
        if not availability:
            licenses_available = 0

        # Because the book showed up in availability, we know we own
        # at least one license to it.
        licenses_owned = 1

        if (not is_new_pool and 
            license_pool.licenses_owned == licenses_owned and 
            license_pool.licenses_available == licenses_available):
            # Optimization: Nothing has changed, so don't even bother
            # calling CirculationData.apply()
            return license_pool, is_new_pool, False

        # If possible, create a FormatData object representing
        # how the book is available.
        formats = []

        # Note that these strings are different from the similar strings
        # found in "fileFormat" when looking at a patron's loans.
        # "ebook" (a medium) versus "EPUB" (a format). Unfortunately we
        # don't get the file format when checking the book's
        # availability before a patron has checked it out.
        delivery_type = None
        drm_scheme = None
        medium = medium.lower()
        if medium == 'ebook':
            delivery_type = Representation.EPUB_MEDIA_TYPE
            # OneClick doesn't tell us the DRM scheme at this
            # point, but some of their EPUBs do have Adobe DRM.
            # Also, their DRM usage may change in the future.
            drm_scheme = DeliveryMechanism.ADOBE_DRM
        elif medium == 'eaudio':
            # TODO: we can't deliver on this promise yet, but this is
            # how we will be delivering audiobook manifests.
            delivery_type = Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE

        if delivery_type:
            formats.append(FormatData(delivery_type, drm_scheme))
        
        circulation_data = CirculationData(
            data_source=DataSource.RB_DIGITAL, 
            primary_identifier=license_pool.identifier, 
            licenses_owned=licenses_owned,
            licenses_available=licenses_available,
            formats=formats,
        )

        policy = policy or self.default_circulation_replacement_policy        
        license_pool, circulation_changed = circulation_data.apply(
            self._db,
            self.collection,
            replace=policy,
        )

        return license_pool, is_new_pool, circulation_changed
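
# A minimal, self-contained sketch of the two mappings described in the
# comments above (an assumption that mirrors the logic, not the production
# code): availability collapses to "0 or 1 licenses available", and the
# medium string picks the delivery type.

def _sketch_licenses_from_availability(available):
    # If the book shows up in availability at all, we own at least one
    # license; we only learn whether it is currently lendable, not how many
    # copies exist.
    licenses_owned = 1
    licenses_available = 1 if available else 0
    return licenses_owned, licenses_available

def _sketch_delivery_type(medium):
    # The real code maps these media to Representation media-type constants;
    # plain labels stand in for them here.
    return {'ebook': 'EPUB', 'eaudio': 'audiobook-manifest'}.get(medium.lower())

assert _sketch_licenses_from_availability(True) == (1, 1)
assert _sketch_licenses_from_availability(False) == (1, 0)
assert _sketch_delivery_type('eBook') == 'EPUB'
assert _sketch_delivery_type('print') is None
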
    def test_process_batch(self):
        provider = self._provider()

        # Here are an Edition and a LicensePool for the same identifier but
        # from different data sources. We would expect this to happen
        # when talking to the open-access content server.
        edition = self._edition(data_source_name=DataSource.OA_CONTENT_SERVER)
        identifier = edition.primary_identifier

        license_source = DataSource.lookup(self._db, DataSource.GUTENBERG)
        pool, is_new = LicensePool.for_foreign_id(
            self._db, license_source, identifier.type, identifier.identifier,
            collection=self._default_collection
        )
        eq_(None, pool.work)

        # Here's a second Edition/LicensePool that's going to cause a
        # problem: the LicensePool will show up in the results, but
        # the corresponding Edition will not.
        edition2, pool2 = self._edition(with_license_pool=True)

        # Here's an identifier that can't be looked up at all,
        # and an identifier that shows up in messages_by_id because
        # its simplified:message was determined to indicate success
        # rather than failure.
        error_identifier = self._identifier()
        not_an_error_identifier = self._identifier()
        messages_by_id = {
            error_identifier.urn : CoverageFailure(
                error_identifier, "500: internal error"
            ),
            not_an_error_identifier.urn : not_an_error_identifier,
        }

        # When we call CoverageProvider.process_batch(), it's going to
        # return the information we just set up: a matched
        # Edition/LicensePool pair, a mismatched LicensePool, and an
        # error message.
        provider.queue_import_results(
            [edition], [pool, pool2], [], messages_by_id
        )

        # Make the CoverageProvider do its thing.
        fake_batch = [object()]
        (success_import, failure_mismatched, failure_message,
         success_message) = provider.process_batch(
            fake_batch
        )

        # The fake batch was provided to lookup_and_import_batch.
        eq_([fake_batch], provider.batches)

        # The matched Edition/LicensePool pair was returned.
        eq_(success_import, edition.primary_identifier)

        # The LicensePool of that pair was passed into finalize_license_pool.
        # The mismatched LicensePool was not.
        eq_([pool], provider.finalized)

        # The mismatched LicensePool turned into a CoverageFailure
        # object.
        assert isinstance(failure_mismatched, CoverageFailure)
        eq_('OPDS import operation imported LicensePool, but no Edition.',
            failure_mismatched.exception)
        eq_(pool2.identifier, failure_mismatched.obj)
        eq_(True, failure_mismatched.transient)

        # The OPDSMessage with status code 500 was returned as a
        # CoverageFailure object.
        assert isinstance(failure_message, CoverageFailure)
        eq_("500: internal error", failure_message.exception)
        eq_(error_identifier, failure_message.obj)
        eq_(True, failure_message.transient)

        # The identifier that had a treat-as-success OPDSMessage was returned
        # as-is.
        eq_(not_an_error_identifier, success_message)
    def sync_bookshelf(self, patron, pin):

        # Get the external view of the patron's current state.
        remote_loans, remote_holds = self.patron_activity(patron, pin)

        # Get our internal view of the patron's current state.
        __transaction = self._db.begin_nested()
        local_loans = self._db.query(Loan).join(Loan.license_pool).filter(
            LicensePool.data_source_id.in_(self.data_source_ids_for_sync)
        ).filter(
            Loan.patron==patron
        )
        local_holds = self._db.query(Hold).join(Hold.license_pool).filter(
            LicensePool.data_source_id.in_(self.data_source_ids_for_sync)
        ).filter(
            Hold.patron==patron
        )

        now = datetime.datetime.utcnow()
        local_loans_by_identifier = {}
        local_holds_by_identifier = {}
        for l in local_loans:
            i = l.license_pool.identifier
            key = (i.type, i.identifier)
            local_loans_by_identifier[key] = l
        for h in local_holds:
            i = h.license_pool.identifier
            key = (i.type, i.identifier)
            local_holds_by_identifier[key] = h

        active_loans = []
        active_holds = []
        for loan in remote_loans:
            # This is a remote loan. Find or create the corresponding
            # local loan.
            source_name = self.identifier_type_to_data_source_name[
                loan.identifier_type
            ]
            source = DataSource.lookup(self._db, source_name)
            key = (loan.identifier_type, loan.identifier)
            pool, ignore = LicensePool.for_foreign_id(
                self._db, source, loan.identifier_type,
                loan.identifier)
            start = loan.start_date or now
            end = loan.end_date
            local_loan, new = pool.loan_to(patron, start, end)
            active_loans.append(local_loan)

            # Remove the local loan from the list so that we don't
            # delete it later.
            if key in local_loans_by_identifier:
                del local_loans_by_identifier[key]

        for hold in remote_holds:
            # This is a remote hold. Find or create the corresponding
            # local hold.
            key = (hold.identifier_type, hold.identifier)
            source_name = self.identifier_type_to_data_source_name[
                hold.identifier_type
            ]
            source = DataSource.lookup(self._db, source_name)
            pool, ignore = LicensePool.for_foreign_id(
                self._db, source, hold.identifier_type,
                hold.identifier)
            start = hold.start_date or now
            end = hold.end_date
            position = hold.hold_position
            local_hold, new = pool.on_hold_to(patron, start, end, position)
            active_holds.append(local_hold)

            # Remove the local hold from the list so that we don't
            # delete it later.
            if key in local_holds_by_identifier:
                del local_holds_by_identifier[key]

        # Every loan remaining in local_loans_by_identifier is a loan that
        # the provider doesn't know about, which means it's expired and we
        # should get rid of it.
        for loan in local_loans_by_identifier.values():
            if loan.license_pool.data_source.id in self.data_source_ids_for_sync:
                logging.info("In sync_bookshelf for patron %s, deleting loan %d (patron %s)" % (patron.authorization_identifier, loan.id, loan.patron.authorization_identifier))
                self._db.delete(loan)

        # Every hold remaining in local_holds_by_identifier is a hold that
        # the provider doesn't know about, which means it's expired and we
        # should get rid of it.
        for hold in local_holds_by_identifier.values():
            if hold.license_pool.data_source.id in self.data_source_ids_for_sync:
                self._db.delete(hold)
        __transaction.commit()
        return active_loans, active_holds
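
# A self-contained sketch of the reconciliation pattern used by
# sync_bookshelf() above: index the local records by (identifier type,
# identifier), cross off everything the remote side still reports, and treat
# whatever is left as expired. (Plain dicts stand in for Loan/Hold objects;
# this is illustrative, not the production code.)

def _sketch_stale_records(local_by_key, remote_keys):
    remaining = dict(local_by_key)
    for key in remote_keys:
        remaining.pop(key, None)   # still active remotely, so keep it
    return list(remaining.values())

_local = {
    ("Overdrive ID", "abc"): "loan-1",
    ("Overdrive ID", "def"): "loan-2",
}
assert _sketch_stale_records(_local, [("Overdrive ID", "abc")]) == ["loan-2"]
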
    def test_update_licensepool_provides_bibliographic_coverage(self):
        # Create an identifier.
        identifier = self._identifier(
            identifier_type=Identifier.OVERDRIVE_ID
        )

        # Prepare bibliographic and availability information
        # for this identifier.
        ignore, availability = self.sample_json(
            "overdrive_availability_information.json"
        )
        ignore, bibliographic = self.sample_json(
            "bibliographic_information.json"
        )

        # To avoid a mismatch, make it look like the information is
        # for the newly created Identifier.
        availability['id'] = identifier.identifier
        bibliographic['id'] = identifier.identifier

        self.api.queue_response(200, content=availability)
        self.api.queue_response(200, content=bibliographic)

        # Now we're ready. When we call update_licensepool, the
        # OverdriveAPI will retrieve the availability information,
        # then the bibliographic information. It will then trigger the
        # OverdriveBibliographicCoverageProvider, which will
        # create an Edition and a presentation-ready Work.
        pool, was_new, changed = self.api.update_licensepool(identifier.identifier)
        eq_(True, was_new)
        eq_(availability['copiesOwned'], pool.licenses_owned)

        edition = pool.presentation_edition
        eq_("Ancillary Justice", edition.title)

        eq_(True, pool.work.presentation_ready)
        assert pool.work.cover_thumbnail_url.startswith(
            'http://images.contentreserve.com/'
        )

        # The book has been run through the bibliographic coverage
        # provider.
        coverage = [
            x for x in identifier.coverage_records
            if x.operation is None
            and x.data_source.name == DataSource.OVERDRIVE
        ]
        eq_(1, len(coverage))

        # Call update_licensepool on an identifier that is missing a work and make
        # sure that it provides bibliographic coverage in that case.
        self._db.delete(pool.work)
        self._db.commit()
        pool, is_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE, Identifier.OVERDRIVE_ID, identifier.identifier,
            collection=self.collection
        )
        ok_(not pool.work)
        self.api.queue_response(200, content=availability)
        self.api.queue_response(200, content=bibliographic)
        pool, was_new, changed = self.api.update_licensepool(identifier.identifier)
        eq_(False, was_new)
        eq_(True, pool.work.presentation_ready)
        if status_code != 200:
            self.log.error(
                "Could not get availability for %s: status code %s",
                book_id, status_code
            )
            return None, None, False

        if isinstance(content, basestring):
            content = json.loads(content)
        book.update(content)

        # Update book_id now that we know we have new data.
        book_id = book['id']
        license_pool, is_new = LicensePool.for_foreign_id(
            self._db, DataSource.OVERDRIVE, Identifier.OVERDRIVE_ID, book_id,
            collection=self.collection
        )
        if is_new or not license_pool.work:
            # Either this is the first time we've seen this book or it doesn't
            # have an associated work. Make sure its identifier has bibliographic coverage.
            self.overdrive_bibliographic_coverage_provider.ensure_coverage(
                license_pool.identifier,
                force=True
            )

        return self.update_licensepool_with_book_info(
            book, license_pool, is_new
        )

    # Alias for the CirculationAPI interface
    def update_availability(self, licensepool):
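        # Assumed one-line delegation to update_licensepool(), inferred from
        # the "alias" comment above; not confirmed by this excerpt.
        return self.update_licensepool(licensepool.identifier.identifier)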