def batch_indexing_offers_in_algolia_from_database(
    client: Redis, ending_page: int = None, limit: int = 10000, starting_page: int = 0
) -> None:
    """Walk the active-offers table page by page and submit each page for indexing.

    Stops when ``ending_page`` is reached (if provided) or when a page comes
    back empty, which means the whole table has been covered.

    Args:
        client: Redis connection handed down to the indexing pipeline.
        ending_page: exclusive upper page bound; ``None`` means "until empty".
        limit: number of offer ids fetched per page.
        starting_page: zero-based page to begin from.
    """
    current_page = starting_page
    more_to_process = True
    while more_to_process:
        # Honour an explicit upper bound so callers can index only a slice.
        if ending_page and ending_page == current_page:
            break
        id_rows = offer_queries.get_paginated_active_offer_ids(limit=limit, page=current_page)
        page_offer_ids = from_tuple_to_int(offer_ids=id_rows)
        if len(page_offer_ids) > 0:
            logger.info("[ALGOLIA] processing offers of database from page %s...", current_page)
            process_eligible_offers(client=client, offer_ids=page_offer_ids, from_provider_update=False)
            logger.info("[ALGOLIA] offers of database from page %s processed!", current_page)
        else:
            # An empty page signals the end of the table.
            more_to_process = False
            logger.info("[ALGOLIA] processing of offers from database finished!")
        current_page += 1
def batch_processing_offer_ids_in_error(client: Redis) -> None:
    """Retry indexing for offers whose previous indexing attempt failed.

    Reads the error queue from Redis, re-submits those offers to the
    indexing pipeline, then clears the queue.

    Consistency fix: added the ``-> None`` return annotation that every
    sibling batch function in this module already carries.

    Args:
        client: Redis connection holding the error queue.
    """
    offer_ids_in_error = get_offer_ids_in_error(client=client)
    if len(offer_ids_in_error) > 0:
        process_eligible_offers(client=client, offer_ids=offer_ids_in_error, from_provider_update=False)
        # Only clear the queue once the retry has been submitted.
        delete_offer_ids_in_error(client=client)
def batch_indexing_offers_in_algolia_by_venue(client: Redis) -> None:
    """Index every offer of each venue queued in Redis, then clear the venue queue."""
    queued_venue_ids = get_venue_ids(client=client)
    if not queued_venue_ids:
        # Nothing queued: the cleanup call is skipped, matching the original flow.
        return
    for current_venue_id in queued_venue_ids:
        page_index = 0
        remaining = True
        while remaining:
            id_rows = offer_queries.get_paginated_offer_ids_by_venue_id(
                limit=settings.ALGOLIA_OFFERS_BY_VENUE_CHUNK_SIZE,
                page=page_index,
                venue_id=current_venue_id,
            )
            chunk_ids = from_tuple_to_int(id_rows)
            if len(chunk_ids) > 0:
                logger.info("[ALGOLIA] processing offers for venue %s from page %s...", current_venue_id, page_index)
                process_eligible_offers(client=client, offer_ids=chunk_ids, from_provider_update=False)
                logger.info("[ALGOLIA] offers for venue %s from page %s processed!", current_venue_id, page_index)
            else:
                remaining = False
                logger.info("[ALGOLIA] processing of offers for venue %s finished!", current_venue_id)
            page_index += 1
    delete_venue_ids(client=client)
def create_industrial_algolia_indexed_objects() -> None:
    # Rebuild the Algolia index from scratch with every offer in the database
    # (sandbox/"industrial" dataset bootstrap): wipe the remote index and the
    # local indexed-offers bookkeeping, then re-submit everything.
    # Gated behind a settings flag so environments without Algolia skip it.
    if settings.ALGOLIA_TRIGGER_INDEXATION:
        logger.info("create_industrial_algolia_objects")
        # NOTE(review): with_entities(Offer.id).all() returns row tuples, not
        # bare ints; the sibling batch functions unwrap such results through
        # from_tuple_to_int before calling process_eligible_offers. Confirm
        # that process_eligible_offers accepts the tuples here.
        offer_ids = Offer.query.with_entities(Offer.id).all()
        clear_index()
        delete_all_indexed_offers(client=app.redis_client)
        process_eligible_offers(client=app.redis_client, offer_ids=offer_ids, from_provider_update=False)
def batch_indexing_offers_in_algolia_by_offer(client: Redis) -> None:
    """Index the offers queued by id in Redis, then empty that queue."""
    queued_ids = get_offer_ids(client=client)
    if not queued_ids:
        return
    queue_size = len(queued_ids)
    logger.info("[ALGOLIA] processing %i offers...", queue_size)
    process_eligible_offers(client=client, offer_ids=queued_ids, from_provider_update=False)
    # Clear the queue only after the batch has been submitted.
    delete_offer_ids(client=client)
    logger.info("[ALGOLIA] %i offers processed!", queue_size)
def test_should_not_reindex_offers_that_are_already_indexed_if_offer_name_has_not_changed(
    self,
    mock_add_objects,
    mock_build_object,
    mock_delete_objects,
    mock_check_offer_exists,
    mock_get_offer_details,
    mock_add_to_indexed_offers,
    app,
):
    """An already-indexed offer whose cached details match the current offer
    must be skipped: nothing is rebuilt, pushed, cached, or deleted."""
    # Given
    client = MagicMock()
    client.pipeline = MagicMock()
    client.pipeline.return_value = MagicMock()
    mock_pipeline = client.pipeline()
    mock_pipeline.execute = MagicMock()
    mock_pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    offer1 = create_offer_with_thing_product(thing_name="super offre 1", venue=venue, is_active=True)
    stock1 = create_stock(booking_limit_datetime=TOMORROW, offer=offer1, quantity=1)
    repository.save(stock1)
    offer_ids = [offer1.id]
    mock_build_object.side_effect = [
        {
            "fake": "object"
        },
    ]
    # The offer is already present in the local indexed-offers bookkeeping...
    mock_check_offer_exists.return_value = True
    # ...and the cached details carry the same name as the saved offer, so the
    # change detection should report "unchanged" (presumably dates/prices are
    # part of that comparison too — confirm against process_eligible_offers).
    mock_get_offer_details.return_value = {
        "name": "super offre 1",
        "dates": [],
        "prices": [10.0]
    }

    # When
    process_eligible_offers(client=client, offer_ids=offer_ids, from_provider_update=True)

    # Then
    assert mock_check_offer_exists.call_count == 1
    assert mock_get_offer_details.call_count == 1
    # No write path was taken: nothing cached, nothing added, nothing deleted.
    assert mock_add_to_indexed_offers.call_count == 0
    assert mock_add_objects.call_count == 0
    assert mock_pipeline.execute.call_count == 0
    assert mock_pipeline.reset.call_count == 0
    assert mock_delete_objects.call_count == 0
def test_should_delete_objects_when_objects_are_not_eligible_and_were_already_indexed(
    self,
    mock_build_object,
    mock_add_objects,
    mock_delete_objects,
    mock_add_to_indexed_offers,
    mock_check_offer_exists,
    mock_delete_indexed_offers,
    app,
):
    """Offers that are no longer eligible (quantity 0 here) but are still in
    the index must be deleted from Algolia and from the local bookkeeping."""
    # Given
    client = MagicMock()
    client.pipeline = MagicMock()
    client.pipeline.return_value = MagicMock()
    mock_pipeline = client.pipeline()
    mock_pipeline.execute = MagicMock()
    mock_pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    # quantity=0 makes both offers non-bookable, hence ineligible for indexing.
    offer1 = create_offer_with_thing_product(venue=venue, is_active=True)
    stock1 = create_stock(booking_limit_datetime=TOMORROW, offer=offer1, quantity=0)
    offer2 = create_offer_with_thing_product(venue=venue, is_active=True)
    stock2 = create_stock(booking_limit_datetime=TOMORROW, offer=offer2, quantity=0)
    repository.save(stock1, stock2)
    # Both offers are recorded as already indexed.
    mock_check_offer_exists.side_effect = [True, True]

    # When
    process_eligible_offers(client=client, offer_ids=[offer1.id, offer2.id], from_provider_update=False)

    # Then
    # Nothing is (re)built or added...
    mock_build_object.assert_not_called()
    mock_add_objects.assert_not_called()
    mock_add_to_indexed_offers.assert_not_called()
    # ...but one batched deletion hits Algolia with the humanized ids...
    mock_delete_objects.assert_called_once()
    assert mock_delete_objects.call_args_list == [
        call(object_ids=[humanize(offer1.id), humanize(offer2.id)])
    ]
    # ...and the local bookkeeping is purged with the raw ids.
    mock_delete_indexed_offers.assert_called_once()
    assert mock_delete_indexed_offers.call_args_list == [
        call(client=client, offer_ids=[offer1.id, offer2.id])
    ]
    mock_pipeline.execute.assert_not_called()
    mock_pipeline.reset.assert_not_called()
    mock_add_offer_ids_in_error.assert_not_called()
def _process_venue_provider(client: Redis, provider_id: str, venue_provider_id: int, venue_id: int) -> None:
    """Index in Algolia every offer of one (venue, provider) pair, page by page.

    Any failure is logged rather than propagated, and the
    "currently in sync" Redis flag for the venue provider is always released.

    Args:
        client: Redis connection handed down to the indexing pipeline.
        provider_id: last provider that touched the offers to select.
        venue_provider_id: id of the VenueProvider whose sync flag is cleared.
        venue_id: venue whose offers are paginated and indexed.
    """
    has_still_offers = True
    page = 0
    try:
        # Idiom fix: plain truthiness instead of `is True`.
        while has_still_offers:
            offer_ids_as_tuple = offer_queries.get_paginated_offer_ids_by_venue_id_and_last_provider_id(
                last_provider_id=provider_id,
                limit=settings.ALGOLIA_OFFERS_BY_VENUE_PROVIDER_CHUNK_SIZE,
                page=page,
                venue_id=venue_id,
            )
            offer_ids_as_int = from_tuple_to_int(offer_ids_as_tuple)
            # Consistency fix: test the converted list like the sibling batch
            # functions do (both collections have the same length, so the
            # control flow is unchanged).
            if len(offer_ids_as_int) > 0:
                logger.info(
                    "[ALGOLIA] processing offers for (venue %s / provider %s) from page %s...",
                    venue_id,
                    provider_id,
                    page,
                )
                process_eligible_offers(client=client, offer_ids=offer_ids_as_int, from_provider_update=True)
                logger.info(
                    "[ALGOLIA] offers for (venue %s / provider %s) from page %s processed",
                    venue_id,
                    provider_id,
                    page,
                )
                page += 1
            else:
                has_still_offers = False
                logger.info(
                    "[ALGOLIA] processing of offers for (venue %s / provider %s) finished!",
                    venue_id,
                    provider_id)
    except Exception as error:  # pylint: disable=broad-except
        # Sync of one venue provider must never crash the caller; log and move on.
        logger.exception(
            "[ALGOLIA] processing of offers for (venue %s / provider %s) failed! %s",
            venue_id,
            provider_id,
            error,
        )
    finally:
        # Always release the sync lock, even on failure.
        delete_venue_provider_currently_in_sync(
            client=client, venue_provider_id=venue_provider_id)
def test_should_not_delete_offers_that_are_not_already_indexed(
        self, mock_delete_objects, mock_check_offer_exists, mock_delete_indexed_offers, app):
    """Ineligible offers that were never indexed must not trigger any deletion."""
    # Given
    redis_client = MagicMock()
    redis_client.pipeline = MagicMock()
    redis_client.pipeline.return_value = MagicMock()
    pipeline = redis_client.pipeline()
    pipeline.execute = MagicMock()
    pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    first_offer = create_offer_with_thing_product(thing_name="super offre 1", venue=venue, is_active=False)
    first_stock = create_stock(booking_limit_datetime=TOMORROW, offer=first_offer, quantity=1)
    second_offer = create_offer_with_thing_product(thing_name="super offre 2", venue=venue, is_active=False)
    second_stock = create_stock(booking_limit_datetime=TOMORROW, offer=second_offer, quantity=1)
    repository.save(first_stock, second_stock)
    # Neither offer is present in the local indexed-offers bookkeeping.
    mock_check_offer_exists.side_effect = [False, False]

    # When
    process_eligible_offers(
        client=redis_client,
        offer_ids=[first_offer.id, second_offer.id],
        from_provider_update=True,
    )

    # Then
    assert mock_check_offer_exists.call_count == 2
    assert mock_delete_objects.call_count == 0
    assert mock_delete_indexed_offers.call_count == 0
def test_should_add_offer_ids_in_error_when_adding_objects_failed(
    self,
    mock_build_object,
    mock_check_offer_exists,
    mock_add_objects,
    mock_delete_objects,
    mock_add_to_indexed_offers,
    mock_delete_indexed_offers,
    mock_add_offer_ids_in_error,
    app,
):
    """When the Algolia push fails, the pipeline must be rolled back (reset,
    never executed) and the offer ids queued in the Redis error list."""
    # Given
    client = MagicMock()
    client.pipeline = MagicMock()
    client.pipeline.return_value = MagicMock()
    mock_pipeline = client.pipeline()
    mock_pipeline.execute = MagicMock()
    mock_pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    offer1 = create_offer_with_thing_product(venue=venue, is_active=True)
    stock1 = create_stock(booking_limit_datetime=TOMORROW, offer=offer1, quantity=10)
    offer2 = create_offer_with_thing_product(venue=venue, is_active=True)
    stock2 = create_stock(booking_limit_datetime=TOMORROW, offer=offer2, quantity=10)
    repository.save(stock1, stock2)
    # Neither offer is indexed yet, so both go down the "add" path...
    mock_check_offer_exists.side_effect = [False, False]
    # ...and the batched Algolia add blows up.
    mock_add_objects.side_effect = [AlgoliaException]

    # When
    process_eligible_offers(client=client, offer_ids=[offer1.id, offer2.id], from_provider_update=False)

    # Then
    # Both objects were built and submitted as one batch before the failure.
    assert mock_build_object.call_count == 2
    mock_add_objects.assert_called_once_with(objects=[{
        "fake": "test"
    }, {
        "fake": "test"
    }])
    # Bookkeeping writes were staged on the pipeline before the push...
    assert mock_add_to_indexed_offers.call_count == 2
    assert mock_add_to_indexed_offers.call_args_list == [
        call(
            pipeline=mock_pipeline,
            offer_details={
                "name": "Test Book",
                "dates": [],
                "prices": [10.0]
            },
            offer_id=offer1.id,
        ),
        call(
            pipeline=mock_pipeline,
            offer_details={
                "name": "Test Book",
                "dates": [],
                "prices": [10.0]
            },
            offer_id=offer2.id,
        ),
    ]
    mock_delete_indexed_offers.assert_not_called()
    mock_delete_objects.assert_not_called()
    # ...but on failure they are discarded (reset) instead of committed (execute),
    # and the failed ids land in the error queue for a later retry.
    mock_pipeline.execute.assert_not_called()
    mock_pipeline.reset.assert_called_once()
    assert mock_add_offer_ids_in_error.call_args_list == [
        call(client=client, offer_ids=[offer1.id, offer2.id])
    ]
def test_should_reindex_offers_only_when_stocks_beginning_datetime_have_changed(
    self,
    mock_add_objects,
    mock_build_object,
    mock_delete_objects,
    mock_check_offer_exists,
    mock_get_offer_details,
    mock_add_to_indexed_offers,
    app,
):
    """An already-indexed event offer whose cached stock dates differ from the
    current stock's beginning datetime must be rebuilt and re-pushed."""
    # Given
    client = MagicMock()
    client.pipeline = MagicMock()
    client.pipeline.return_value = MagicMock()
    mock_pipeline = client.pipeline()
    mock_pipeline.execute = MagicMock()
    mock_pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    offer = create_offer_with_event_product(date_created=datetime(
        2019, 1, 1), event_name="super offre 1", venue=venue, is_active=True)
    stock = create_stock(
        beginning_datetime=datetime(2019, 1, 5),
        booking_limit_datetime=datetime(2019, 1, 3),
        offer=offer,
        quantity=1,
    )
    repository.save(stock)
    offer_ids = [offer.id]
    mock_build_object.side_effect = [
        {
            "fake": "object"
        },
    ]
    mock_check_offer_exists.return_value = True
    # Cached details carry the same name but a stale date (this epoch value is
    # not 2019-01-05), so only the dates difference triggers the reindex.
    mock_get_offer_details.return_value = {
        "name": "super offre 1",
        "dates": [1515542400.0],
        "prices": [10.0]
    }

    # When
    process_eligible_offers(client=client, offer_ids=offer_ids, from_provider_update=True)

    # Then
    assert mock_check_offer_exists.call_count == 1
    assert mock_get_offer_details.call_count == 1
    # The cache is refreshed with the current beginning datetime
    # (1546646400.0 == 2019-01-05 UTC as a unix timestamp)...
    assert mock_add_to_indexed_offers.call_count == 1
    assert mock_add_to_indexed_offers.call_args_list == [
        call(
            pipeline=mock_pipeline,
            offer_details={
                "name": "super offre 1",
                "dates": [1546646400.0],
                "prices": [10.0]
            },
            offer_id=offer.id,
        ),
    ]
    # ...the rebuilt object is pushed, the pipeline committed, nothing deleted.
    assert mock_add_objects.call_count == 1
    assert mock_add_objects.call_args_list == [
        call(objects=[{
            "fake": "object"
        }])
    ]
    assert mock_pipeline.execute.call_count == 1
    assert mock_pipeline.reset.call_count == 1
    assert mock_delete_objects.call_count == 0
def test_should_delete_offers_that_are_already_indexed(
    self,
    mock_add_objects,
    mock_build_object,
    mock_delete_objects,
    mock_check_offer_exists,
    mock_get_offer_details,
    mock_add_to_indexed_offers,
    mock_delete_indexed_offers,
    app,
):
    """Inactive offers that are still recorded as indexed must be removed from
    Algolia (one batched call) and from the local bookkeeping."""
    # Given
    client = MagicMock()
    client.pipeline = MagicMock()
    client.pipeline.return_value = MagicMock()
    mock_pipeline = client.pipeline()
    mock_pipeline.execute = MagicMock()
    mock_pipeline.reset = MagicMock()
    offerer = create_offerer(is_active=True, validation_token=None)
    venue = create_venue(offerer=offerer, validation_token=None)
    # is_active=False makes all three offers ineligible for indexing.
    offer1 = create_offer_with_thing_product(thing_name="super offre 1", venue=venue, is_active=False)
    stock1 = create_stock(booking_limit_datetime=TOMORROW, offer=offer1, quantity=1)
    offer2 = create_offer_with_thing_product(thing_name="super offre 2", venue=venue, is_active=False)
    stock2 = create_stock(booking_limit_datetime=TOMORROW, offer=offer2, quantity=1)
    offer3 = create_offer_with_thing_product(thing_name="super offre 3", venue=venue, is_active=False)
    stock3 = create_stock(booking_limit_datetime=TOMORROW, offer=offer3, quantity=1)
    repository.save(stock1, stock2, stock3)
    offer_ids = [offer1.id, offer2.id, offer3.id]
    # All three are recorded as already indexed.
    mock_check_offer_exists.side_effect = [True, True, True]

    # When
    process_eligible_offers(client=client, offer_ids=offer_ids, from_provider_update=True)

    # Then
    assert mock_check_offer_exists.call_count == 3
    # No build/add path was taken...
    assert mock_build_object.call_count == 0
    assert mock_add_objects.call_count == 0
    assert mock_get_offer_details.call_count == 0
    assert mock_add_to_indexed_offers.call_count == 0
    # ...Algolia receives one deletion with humanized ids...
    assert mock_delete_objects.call_count == 1
    assert mock_delete_objects.call_args_list == [
        call(object_ids=[
            humanize(offer1.id),
            humanize(offer2.id),
            humanize(offer3.id)
        ])
    ]
    # ...and the local bookkeeping is purged with the raw ids.
    assert mock_delete_indexed_offers.call_count == 1
    assert mock_delete_indexed_offers.call_args_list == [
        call(client=client, offer_ids=[offer1.id, offer2.id, offer3.id])
    ]
    assert mock_pipeline.execute.call_count == 0
    assert mock_pipeline.reset.call_count == 0