def test_exception_exc_info(self):
    """
    BoundLogger.exception forces exc_info=True on the emitted event.
    """
    logger = BoundLogger(ReturnLogger(), [], {})

    result = logger.exception("event")

    assert result == ((), {"exc_info": True, "event": "event"})
def test_proxies_exception(self):
    """
    BoundLogger.exception is proxied to Logger.error.
    """
    logger = BoundLogger(ReturnLogger(), [return_method_name], {})

    assert logger.exception("event") == "error"
def test_exception_exc_info_override(self):
    """
    If *exc_info* is passed to exception, it's used.
    """
    # An explicit exc_info argument must win over the implicit
    # exc_info=True that BoundLogger.exception normally injects.
    bl = BoundLogger(ReturnLogger(), [], {})
    assert ((), {"exc_info": 42, "event": "event"}) == bl.exception(
        "event", exc_info=42
    )
def test_exception_maps_to_error(self):
    """
    BoundLogger.exception is proxied to Logger.error.
    """
    # return_method_name makes ReturnLogger report which logger method
    # ultimately handled the call; exception() must route to error().
    bl = BoundLogger(ReturnLogger(), [return_method_name], {})
    assert "error" == bl.exception("event")
async def ingest_ltd_lander_jsonld_document(
    *,
    app: web.Application,
    logger: BoundLogger,
    url_ingest_message: Dict[str, Any],
) -> None:
    """Run the Algolia ingest of a LTD_LANDER_JSONLD content type.

    Fetches the edition's ``metadata.jsonld`` document, reduces it into
    content chunks, builds Algolia records from those chunks, saves the
    records to the configured Algolia index, and finally deletes stale
    records for the same base URL.

    Parameters
    ----------
    app : `aiohttp.web.Application`
        The app.
    logger
        A structlog logger that is bound with context about the Kafka
        message.
    url_ingest_message : `dict`
        The deserialized value of the Kafka message.
    """
    # Rebind the logger so every subsequent log line carries the content
    # URL and type from the Kafka message.
    logger = logger.bind(
        content_url=url_ingest_message["url"],
        content_type=url_ingest_message["content_type"],
    )
    logger.info("Starting LTD_LANDER_JSONLD ingest")

    http_session = app["safir/http_session"]

    # Resolve the edition to find where the content is published.
    edition_data = await get_json_data(
        url=url_ingest_message["edition"]["url"],
        logger=logger,
        http_session=http_session,
    )
    published_url = edition_data["published_url"]
    # The JSON-LD metadata file lives at the root of the published site;
    # avoid a double slash when published_url already ends with "/".
    jsonld_name = "metadata.jsonld"
    if published_url.endswith("/"):
        jsonld_url = f"{published_url}{jsonld_name}"
    else:
        jsonld_url = f"{published_url}/{jsonld_name}"

    try:
        metadata = await get_json_data(
            url=jsonld_url,
            logger=logger,
            http_session=http_session,
            # by-pass aiohttp's encoding check; the jsonld files do not have
            # correct CONTENT-TYPE headers.
            encoding="utf-8",
            content_type=None,
        )
    except Exception:
        # Log with the exact URL that failed, then let the caller decide
        # how to handle the failed ingest.
        logger.exception(
            "Failure getting metadata.jsonld", jsonld_url=jsonld_url
        )
        raise

    try:
        # Reduce the raw JSON-LD into a chunked document representation.
        reduced_document = ReducedLtdLanderDocument(
            url=published_url, metadata=metadata, logger=logger
        )
    except Exception:
        logger.exception("Failed to build record")
        raise

    # One surrogate key ties together all records from this ingest run so
    # old records can be identified and deleted afterwards.
    surrogate_key = generate_surrogate_key()

    logger.debug(
        "Reduced LTD Lander Document", chunks=len(reduced_document.chunks)
    )

    try:
        # One Algolia record per content chunk...
        records = [
            create_record(
                chunk=s,
                document=reduced_document,
                surrogate_key=surrogate_key,
            )
            for s in reduced_document.chunks
        ]
        # ...plus one extra record for the document's description, built
        # from the h1 header and description text.
        description_chunk = ContentChunk(
            headers=[reduced_document.h1],
            content=reduced_document.description,
        )
        records.append(
            create_record(
                chunk=description_chunk,
                document=reduced_document,
                surrogate_key=surrogate_key,
            )
        )
    except Exception:
        logger.exception("Failed to build records")
        raise
    logger.info("Finished building records")

    # The Algolia client is optional (None when search is not configured);
    # skip upload and cleanup entirely in that case.
    if app["ook/algolia_search"] is not None:
        try:
            client = app["ook/algolia_search"]
            index = client.init_index(
                app["safir/config"].algolia_document_index_name
            )
        except Exception:
            logger.exception(
                "Error initializing Algolia index",
                index_name=app["safir/config"].algolia_document_index_name,
            )
            raise

        # Fan out one async save per record, then wait for indexing to
        # complete via the aggregated MultipleResponse.
        tasks = [index.save_object_async(record) for record in records]
        try:
            results = await asyncio.gather(*tasks)
            MultipleResponse(results).wait()
        except Exception:
            # NOTE(review): this branch swallows the upload error (no
            # re-raise) and dumps the records for debugging, then still
            # proceeds to delete_old_records below — confirm this
            # best-effort behavior is intended.
            logger.error("Got algoliasearch request error")
            for record in records:
                logger.debug(json.dumps(record, indent=2, sort_keys=True))
        logger.info("Finished uploading to Algolia")

        # Remove records from previous ingests of the same base URL:
        # anything that does not carry this run's surrogate key is stale.
        await delete_old_records(
            index=index,
            base_url=records[0]["baseUrl"],
            surrogate_key=surrogate_key,
            logger=logger,
        )