async def reindex_all(self, arguments):
    """Run a full reindex pass over every container in every database.

    For each container a ``Reindexer`` is created with the options taken
    from *arguments*, the reindex is executed, a summary is logged, and the
    container's transaction is committed even if the reindex fails.
    """
    catalog = get_utility(ICatalogUtility)
    # Give custom-type initialization a moment to finish before indexing.
    await asyncio.sleep(1)
    async for _, tm, container in get_containers():
        try:
            self.reindexer = Reindexer(
                catalog,
                container,
                response=printer(),
                log_details=arguments.log_details,
                memory_tracking=arguments.memory_tracking,
                reindex_security=arguments.reindex_security,
                mapping_only=arguments.mapping_only,
                cache=False,
            )
            await self.reindexer.reindex(container)
            elapsed = int(time.time() - self.reindexer.start_time)
            logger.warning(f"""Finished reindex:
Total Seconds: {elapsed}
Processed: {self.reindexer.processed}
Indexed: {self.reindexer.indexed}
Objects missing: {len(self.reindexer.missing)}
Objects orphaned: {len(self.reindexer.orphaned)}
""")
        finally:
            # Commit regardless of outcome so partial progress is persisted.
            await tm.commit()
async def test_migrator_emmits_events_on_end(es_requester, event_handler):
    """Reindexing a single object emits IIndexProgress start and end events.

    The first event carries ``completed=None``/``processed=0`` (start) and
    the second carries ``completed=True`` (end); both reference the container.
    """
    async with es_requester as requester:
        resp, status = await requester('POST', '/db/guillotina/', data=json.dumps({
            '@type': 'Folder',
            'title': 'Folder',
            'id': 'foobar'
        }))
        container, req, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        gr.base.adapters.subscribe([IIndexProgress], None, event_handler.subscribe)
        migrator = Reindexer(search, container, force=True, request=req,
                             reindex_security=True)
        ob = await container.async_get('foobar')
        await migrator.reindex(ob)
        # Use identity checks for the True/None singletons (PEP 8, flake8 E712/E711).
        assert event_handler.called is True
        assert len(event_handler.event) == 2
        assert event_handler.event[0].completed is None
        assert event_handler.event[0].processed == 0
        assert event_handler.event[1].completed is True
        assert event_handler.event[0].context == container
async def test_migrator_emmits_events_on_end(es_requester, event_handler):
    """Verify that a reindex run fires IIndexProgress start/end events."""
    async with es_requester as requester:
        payload = json.dumps({
            "@type": "Folder",
            "title": "Folder",
            "id": "foobar"
        })
        resp, status = await requester("POST", "/db/guillotina/", data=payload)
        container, req, txn, tm = await setup_txn_on_container(requester)
        catalog = get_utility(ICatalogUtility)
        gr.base.adapters.subscribe([IIndexProgress], None, event_handler.subscribe)
        reindexer = Reindexer(catalog, container, force=True, reindex_security=True)
        folder = await container.async_get("foobar")
        await reindexer.reindex(folder)
        # First event is the start marker, second is the completion marker.
        assert event_handler.called is True
        assert len(event_handler.event) == 2
        assert event_handler.event[0].completed is None
        assert event_handler.event[0].processed == 0
        assert event_handler.event[1].completed is True
        assert event_handler.event[0].context == container
async def reindex_all_content(self, obj, security=False, response=noop_response, request=None):
    """Rebuild the index for *obj* and everything beneath it.

    When ``security`` is true the pass is limited to security reindexing.
    Progress is reported through *response*.
    """
    from guillotina_elasticsearch.reindex import Reindexer
    indexer = Reindexer(self, obj, reindex_security=security, response=response)
    await indexer.reindex(obj)
async def es_reindex(path, root, request, reindex_security=False):
    """Traverse to *path* under *root* and reindex that subtree.

    A security-only pass is requested with ``reindex_security=True``.
    The request's transaction is always aborted on exit so the worker
    leaves no open transaction behind.
    """
    try:
        ob, end_path = await traverse(request, root, path.lstrip('/').split('/'))
        assert len(end_path) == 0
        catalog = getUtility(ICatalogUtility)
        # A security-only pass is, by definition, not a "full" reindex.
        full = not reindex_security
        reindexer = Reindexer(catalog, ob, reindex_security=reindex_security,
                              full=full, request=request, log_details=True)
        await reindexer.reindex(ob)
    finally:
        txn = get_transaction(request)
        if txn is not None:
            tm = get_tm(request)
            await tm.abort(txn=txn)
async def test_migrator_emit_events_during_indexing(es_requester, event_handler):
    """Flushing a non-empty batch emits an IIndexProgress event with the context."""
    async with es_requester as requester:
        container, req, txn, tm = await setup_txn_on_container(requester)  # pylint: disable=W0612
        search = getUtility(ICatalogUtility)
        _marker = {}
        gr.base.adapters.subscribe([IIndexProgress], None, event_handler.subscribe)
        migrator = Reindexer(search, _marker, force=True, request=req,
                             reindex_security=True)
        # bulk_size=0 forces the very next attempt_flush() to flush immediately.
        migrator.bulk_size = 0
        migrator.batch = {}
        migrator.existing = {}
        migrator.processed = 1
        migrator.missing = {'xx': 1}
        await migrator.attempt_flush()
        # Identity check for the True singleton (PEP 8, flake8 E712).
        assert event_handler.called is True
        assert isinstance(event_handler.event[0], IndexProgress)
        assert event_handler.event[0].context == _marker
async def test_reindex_es(es_requester, command_arguments):
    """End-to-end reindex: wipe the index, reindex directly, then via a worker task."""
    async with es_requester as requester:
        resp, status = await requester(
            'POST', '/db/guillotina/',
            data=json.dumps({
                '@type': 'Folder',
                'title': 'Folder',
                'id': 'folder'
            })
        )
        assert status == 201
        resp, status = await requester(
            'POST', '/db/guillotina/folder',
            data=json.dumps({
                '@type': 'Item',
                'title': 'Item',
                'id': 'item'
            })
        )
        assert status == 201

        request = get_mocked_request(requester.db)
        root = await get_root(request)
        txn = await request._tm.begin(request)
        container = await root.async_get('guillotina')
        request.container = container
        catalog = get_utility(ICatalogUtility)
        reindexer = Reindexer(catalog, container, full=True)

        # Empty the index first so the reindex below has a visible effect.
        await asyncio.sleep(1)
        await catalog.refresh(container)
        index_name = await catalog.get_index_name(container)
        await catalog.unindex_all_children(container, index_name)
        await asyncio.sleep(1)
        await catalog.refresh(container)
        uids = await reindexer.get_all_uids()

        # Direct reindex restores the documents.
        await reindexer.reindex(container)
        await asyncio.sleep(1)
        await catalog.refresh(container)
        uids = await reindexer.get_all_uids()
        assert len(uids) > 1

        # Wipe again, then reindex through the worker command.
        await catalog.unindex_all_children(container, index_name)
        task_info = Task(data={
            "name": 'es-reindex',
            "guillotina": True,
            "args": {
                "path": get_full_content_path(request, container),
            }
        })
        command_arguments.payload = task_info.serialize()
        command_arguments.task_id = None
        command_arguments.tags = []
        worker = WorkerCommand(command_arguments)
        await worker.run(command_arguments, app_settings, None)
        await asyncio.sleep(1)
        await catalog.refresh(container)
        uids = await reindexer.get_all_uids()
        assert len(uids) > 1
        await request._tm.abort(txn=txn)