Example #1
async def test_calculate_mapping_diff(es_requester):
    """Migrator.calculate_mapping_diff should report fields whose mapping
    differs between the installed index and the migration work index.

    We change 'creators' to text on every doc type and add a 'foobar' field
    to Item, then assert the diff reports exactly those changes.
    """
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)

        migrator = Migrator(search, container, force=True, request=request)
        version, new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name

        mappings = get_mappings()
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))

        # tweak mappings so we can get the diff...
        # (only the values are needed here; the original looped over
        # .items() and never used the key)
        for value in mappings.values():
            # need to modify on *all* or it won't work with ES..
            if 'creators' in value['properties']:
                value['properties']['creators']['type'] = 'text'
        mappings['Item']['properties']['foobar'] = {
            'type': 'keyword',
            'index': True
        }

        # settings/mappings can only be replaced while the index is closed
        await search.conn.indices.close(new_index_name)
        await search.conn.indices.put_settings(index_settings, new_index_name)
        for key, value in mappings.items():
            await search.conn.indices.put_mapping(new_index_name, key, value)
        await search.conn.indices.open(new_index_name)

        diff = await migrator.calculate_mapping_diff()
        assert len(diff['Folder']) == 1  # only 'creators' changed
        assert len(diff['Item']) == 2  # 'creators' changed + 'foobar' added
Example #2
async def _test_migrate_while_content_getting_added(es_requester):
    # Leading underscore: disabled test (pytest will not collect it).
    # Verifies content added concurrently with a running migration still ends
    # up in the index.
    async with es_requester as requester:
        add_count = await add_content(requester)

        container, request, txn, tm = await setup_txn_on_container(requester)

        search = getUtility(ICatalogUtility)
        await search.refresh(container)
        await asyncio.sleep(3)

        assert add_count == await search.get_doc_count(container)

        migrator = Migrator(search, container, force=True)
        # add more content from two concurrent tasks while the migration runs
        add_content_task1 = asyncio.ensure_future(
            add_content(requester, base_id='foo1-'))
        add_content_task2 = asyncio.ensure_future(
            add_content(requester, base_id='foo2-'))
        reindex_task = asyncio.ensure_future(migrator.run_migration())

        await asyncio.wait(
            [add_content_task1, reindex_task, add_content_task2])
        await search.refresh(container)
        await asyncio.sleep(3)

        idx_count = await search.get_doc_count(container)
        # +1 here because container ob now indexed and it isn't by default in tests
        assert (add_count * 3) + 1 == idx_count

        await tm.abort(txn=txn)
Example #3
    async def migrate_all(self, arguments):
        """Run a Migrator over every container, logging a per-container summary.

        :param arguments: parsed argparse namespace carrying the migration
            flags (full, force, log_details, memory_tracking,
            reindex_security, mapping_only)
        """
        search = get_utility(ICatalogUtility)
        change_transaction_strategy('none')
        await asyncio.sleep(1)  # since something initialize custom types...
        async for _, tm, container in get_containers(self.request):
            try:
                # keep a handle on the migrator so callers can inspect/cancel
                self.migrator = Migrator(
                    search,
                    container,
                    response=printer(),
                    full=arguments.full,
                    force=arguments.force,
                    log_details=arguments.log_details,
                    memory_tracking=arguments.memory_tracking,
                    reindex_security=arguments.reindex_security,
                    mapping_only=arguments.mapping_only,
                    cache=False)
                await self.migrator.run_migration()
                seconds = int(time.time() - self.migrator.start_time)
                logger.warning(f'''Finished migration:
Total Seconds: {seconds}
Processed: {self.migrator.processed}
Indexed: {self.migrator.indexed}
Objects missing: {len(self.migrator.missing)}
Objects orphaned: {len(self.migrator.orphaned)}
Mapping Diff: {self.migrator.mapping_diff}
''')
            finally:
                # commit even if the migration raised so the txn is released
                await tm.commit(self.request)
async def test_calculate_mapping_diff(es_requester):
    """calculate_mapping_diff should report the fields changed on the work
    index ('creators' type change + added 'foobar')."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)

        migrator = Migrator(search, container, force=True, request=request)
        version, new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name

        mappings = get_mappings()
        index_settings = DEFAULT_SETTINGS.copy()
        index_settings.update(app_settings.get('index', {}))

        # tweak mappings so we can get the diff...
        if 'creators' in mappings['properties']:
            mappings['properties']['creators']['type'] = 'text'
        mappings['properties']['foobar'] = {'type': 'keyword', 'index': True}

        # settings/mappings can only be replaced while the index is closed
        await search.conn.indices.close(new_index_name)
        await search.conn.indices.put_settings(body=index_settings,
                                               index=new_index_name)
        await search.conn.indices.put_mapping(index=new_index_name,
                                              doc_type=DOC_TYPE,
                                              body=mappings)
        await search.conn.indices.open(new_index_name)

        diff = await migrator.calculate_mapping_diff()
        # two differences: changed 'creators' + new 'foobar'
        assert len(diff[DOC_TYPE]) == 2
async def test_removes_orphans(es_requester):
    """A migration should purge indexed documents with no backing object."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)

        # Index a document by hand; nothing in the database references it,
        # so it is an orphan from the migrator's point of view.
        orphan_body = {"title": "foobar", "type_name": "Item"}
        await search.index(container, {"foobar": orphan_body})

        index_manager = get_adapter(container, IIndexManager)
        index_name = await index_manager.get_index_name()  # alias

        # sanity check: the orphan is present before migrating
        indexed_doc = await search.get_connection().get(
            index=index_name, doc_type=DOC_TYPE, id="foobar")
        assert indexed_doc["found"]

        migrator = Migrator(search, container, force=True)
        await migrator.run_migration()

        async def _verify():
            # the orphan must be gone from the index...
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.get_connection().get(
                    index=index_name, doc_type=DOC_TYPE, id="foobar")
            # ...and recorded on the migrator
            assert len(migrator.orphaned) == 1
            assert migrator.orphaned[0] == "foobar"

        await run_with_retries(_verify, requester)
async def test_delete_in_both_during_migration(es_requester):
    """A delete issued mid-migration must be applied to the live index AND
    the in-progress work index."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)
        migrator = Migrator(search, container, force=True)
        im = get_adapter(container, IIndexManager)
        index_name = await im.get_index_name()
        next_index_name = await migrator.setup_next_index()

        # create then immediately delete an object while migration is set up
        resp, _ = await requester(
            "POST",
            "/db/guillotina",
            data=json.dumps({
                "@type": "Folder",
                "title": "Foobar1",
                "id": "foobar"
            }),
        )
        await requester("DELETE", "/db/guillotina/foobar")

        async def _test():
            # gone from the work index...
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.get_connection().get(index=next_index_name,
                                                  doc_type="_all",
                                                  id=resp["@uid"])
            # ...and from the live index
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.get_connection().get(index=index_name,
                                                  doc_type="_all",
                                                  id=resp["@uid"])

        await run_with_retries(_test, requester)
async def test_removes_orphans(es_requester):
    """Migration should drop indexed docs that have no backing object."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        await search.index(
            container, {'foobar': {
                'title': 'foobar',
                'type_name': 'Item'
            }})
        # foobar here is an orphaned object because it doesn't reference an object

        index_name = await search.get_index_name(container)  # alias

        # sanity check: the orphan is present before migrating
        doc = await search.conn.get(index=index_name,
                                    doc_type=DOC_TYPE,
                                    id='foobar')
        assert doc['found']

        migrator = Migrator(search, container, force=True)
        await migrator.run_migration()

        async def _test():
            # the orphan must be gone and recorded on the migrator
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.conn.get(index=index_name,
                                      doc_type=DOC_TYPE,
                                      id='foobar')

            assert len(migrator.orphaned) == 1
            assert migrator.orphaned[0] == 'foobar'

        await run_with_retries(_test, requester)
Example #8
async def _test_new_deletes_are_performed_during_migration(es_requester):
    # Leading underscore: disabled test (pytest will not collect it).
    # Deletes issued while a migration is in progress should be applied to
    # the work index as well as the live index.
    async with es_requester as requester:
        await add_content(requester)
        container, request, txn, tm = await setup_txn_on_container(requester)

        search = getUtility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        await migrator.setup_next_index()
        await migrator.copy_to_next_index()

        # both indexes must start out with the same doc count
        await search.refresh(container, migrator.work_index_name)
        await search.refresh(container)
        num_docs = await search.get_doc_count(container, migrator.work_index_name)
        current_docs = await search.get_doc_count(container)
        assert num_docs == current_docs

        # pick a random grandchild object to remove
        keys = await container.async_keys()
        key = random.choice(keys)
        ob = await container.async_get(key)
        keys = await ob.async_keys()
        key = random.choice(keys)
        ob = await ob.async_get(key)

        await search.remove(container, [(
            ob._p_oid, ob.type_name, get_content_path(ob)
        )], request=request)

        # after the removal both indexes should still agree
        await search.refresh(container, migrator.work_index_name)
        await search.refresh(container)
        num_docs = await search.get_doc_count(container, migrator.work_index_name)
        current_count = await search.get_doc_count(container)
        assert num_docs == current_count
Example #9
async def test_removes_orphans(es_requester):
    """Migration should drop indexed docs that have no backing object
    (older aioes-based variant)."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        await search.index(
            container, {'foobar': {
                'title': 'foobar',
                'type_name': 'Item'
            }})
        # foobar here is an orphaned object because it doesn't reference an object

        index_name = await search.get_index_name(container)  # alias

        # sanity check: the orphan is present before migrating
        doc = await search.conn.get(index_name, 'foobar')
        assert doc['found']

        migrator = Migrator(search, container, force=True)
        await migrator.run_migration()
        # give ES a moment to settle before refreshing and asserting
        await asyncio.sleep(1)
        await search.refresh(container, index_name)
        await asyncio.sleep(1)

        with pytest.raises(aioes.exception.NotFoundError):
            doc = await search.conn.get(index_name, 'foobar')

        assert len(migrator.orphaned) == 1
        assert migrator.orphaned[0] == 'foobar'
async def test_create_next_index(es_requester):
    """Creating the next migration index should bump the version suffix."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        catalog = get_utility(ICatalogUtility)
        migrator = Migrator(catalog, container, force=True, request=request)
        next_name = await migrator.create_next_index()
        assert next_name == 'guillotina-db-guillotina_2'
Example #11
 def __init__(self,
              txn,
              tm,
              request,
              container,
              last_tid=-2,
              index_scroll='15m',
              hits_scroll='5m',
              use_tid_query=True):
     """Collect the state needed to reconcile the DB with the ES index.

     :param txn: active transaction
     :param tm: transaction manager
     :param request: current request
     :param container: container whose index is being checked
     :param last_tid: transaction id to resume from (-2 means no prior tid)
     :param index_scroll: presumably the ES scroll keep-alive for the index
         pass — TODO confirm against the scan code
     :param hits_scroll: presumably the ES scroll keep-alive for the hits
         pass — TODO confirm against the scan code
     :param use_tid_query: whether to filter queries by tid
     """
     self.txn = txn
     self.tm = tm
     self.request = request
     self.container = container
     # discrepancy buckets filled in while scanning
     self.orphaned = set()
     self.missing = set()
     self.out_of_date = set()
     self.utility = getUtility(ICatalogUtility)
     self.migrator = Migrator(self.utility,
                              self.container,
                              full=True,
                              bulk_size=10)
     self.cache = LRU(200)
     self.last_tid = last_tid
     print(f'Last TID: {self.last_tid}')
     self.use_tid_query = use_tid_query
     self.last_zoid = None
     # for state tracking so we get boundries right
     self.last_result_set = []
     self.index_scroll = index_scroll
     self.hits_scroll = hits_scroll
Example #12
async def _test_new_indexes_are_performed_during_migration(es_requester):
    # Leading underscore: disabled test (pytest will not collect it).
    # Content added while a migration runs should be indexed into the work
    # index as well as the live one.
    async with es_requester as requester:
        await add_content(requester)
        container, request, txn, tm = await setup_txn_on_container(requester)

        search = getUtility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        await migrator.setup_next_index()
        await migrator.copy_to_next_index()

        # both indexes must start out with the same doc count
        await asyncio.sleep(1)
        await search.refresh(container, migrator.work_index_name)
        await search.refresh(container)
        await asyncio.sleep(1)
        num_docs = await search.get_doc_count(container, migrator.work_index_name)
        assert num_docs == await search.get_doc_count(container)

        await add_content(requester, base_id='foobar1-')

        # after adding more content both indexes should still agree
        await asyncio.sleep(1)
        await search.refresh(container, migrator.work_index_name)
        await search.refresh(container)
        await asyncio.sleep(1)
        num_docs = await search.get_doc_count(container, migrator.work_index_name)
        assert num_docs == await search.get_doc_count(container)
async def test_delete_in_both_during_migration(es_requester):
    """A delete issued mid-migration must be applied to the live index AND
    the in-progress work index (request-based variant)."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        im = get_adapter(container, IIndexManager)
        index_name = await im.get_index_name()
        next_index_name = await migrator.setup_next_index()

        # create then immediately delete an object while migration is set up
        resp, _ = await requester('POST',
                                  '/db/guillotina',
                                  data=json.dumps({
                                      '@type': 'Folder',
                                      'title': 'Foobar1',
                                      'id': 'foobar'
                                  }))
        await requester('DELETE', '/db/guillotina/foobar')

        async def _test():
            # gone from the work index...
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.conn.get(index=next_index_name,
                                      doc_type='_all',
                                      id=resp['@uid'])
            # ...and from the live index
            with pytest.raises(aioelasticsearch.exceptions.NotFoundError):
                await search.conn.get(index=index_name,
                                      doc_type='_all',
                                      id=resp['@uid'])

        await run_with_retries(_test, requester)
async def test_search_works_on_new_docs_during_migration(es_requester):
    """Docs created mid-migration should be retrievable from both the work
    index and the live index."""
    async with es_requester as requester:
        await add_content(requester, 2)
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        im = get_adapter(container, IIndexManager)
        index_name = await im.get_index_name()
        next_index_name = await migrator.setup_next_index()

        resp, _ = await requester('POST',
                                  '/db/guillotina',
                                  data=json.dumps({'@type': 'Item'}))

        async def _test():
            # present in the work index...
            result1 = await search.conn.get(index=next_index_name,
                                            doc_type='_all',
                                            id=resp['@uid'])
            assert result1 is not None
            # ...and in the live index
            result2 = await search.conn.get(index=index_name,
                                            doc_type='_all',
                                            id=resp['@uid'])
            assert result2 is not None

        await run_with_retries(_test, requester)
Example #15
async def es_migrate(path,
                     root,
                     request,
                     reindex_security=False,
                     mapping_only=False,
                     full=False,
                     force=False):
    """Traverse to the object at *path* and run an ES migration on it.

    :param path: traversal path (e.g. '/db/container') to the object
    :param root: root object to start traversal from
    :param request: current request; also used to locate the transaction
    :param reindex_security: only reindex security-related data
    :param mapping_only: apply mapping changes without reindexing data
    :param full: force a full reindex
    :param force: override an existing/failed migration index
    :raises Exception: if *path* cannot be fully traversed
    """
    try:
        ob, end_path = await traverse(request, root,
                                      path.lstrip('/').split('/'))
        if len(end_path) != 0:
            # leftover path segments mean traversal stopped early
            # (message grammar fixed: was "Could not found object")
            raise Exception('Could not find object')
        search = getUtility(ICatalogUtility)
        migrator = Migrator(search,
                            ob,
                            reindex_security=reindex_security,
                            full=full,
                            force=force,
                            mapping_only=mapping_only,
                            request=request,
                            log_details=True)
        await migrator.run_migration()
    finally:
        # never leave the transaction open, even if the migration fails
        txn = get_transaction(request)
        if txn is not None:
            tm = get_tm(request)
            await tm.abort(txn=txn)
Example #16
class MigrateCommand(Command):
    """CLI command that runs an ES migration over every container.

    Keeps a reference to the active Migrator so an interrupted run can be
    cancelled cleanly from ``run``.
    """
    description = 'Migrate indexes'
    # set by migrate_all; stays None if interrupted before any container
    migrator = None

    def get_parser(self):
        """Extend the base parser with migration-specific flags."""
        parser = super(MigrateCommand, self).get_parser()
        parser.add_argument('--full',
                            help='Do a full reindex',
                            action='store_true')
        parser.add_argument('--force',
                            help='Override failing migration if existing '
                            'migration index exists',
                            action='store_true')
        parser.add_argument('--log-details', action='store_true')
        parser.add_argument('--memory-tracking', action='store_true')
        parser.add_argument('--reindex-security', action='store_true')
        parser.add_argument('--mapping-only', action='store_true')
        return parser

    async def migrate_all(self, arguments):
        """Run a Migrator for each container, logging a summary per container."""
        search = get_utility(ICatalogUtility)
        change_transaction_strategy('none')
        await asyncio.sleep(1)  # since something initialize custom types...
        async for _, tm, container in get_containers(self.request):
            try:
                self.migrator = Migrator(
                    search,
                    container,
                    response=printer(),
                    full=arguments.full,
                    force=arguments.force,
                    log_details=arguments.log_details,
                    memory_tracking=arguments.memory_tracking,
                    reindex_security=arguments.reindex_security,
                    mapping_only=arguments.mapping_only,
                    cache=False)
                await self.migrator.run_migration()
                seconds = int(time.time() - self.migrator.start_time)
                logger.warning(f'''Finished migration:
Total Seconds: {seconds}
Processed: {self.migrator.processed}
Indexed: {self.migrator.indexed}
Objects missing: {len(self.migrator.missing)}
Objects orphaned: {len(self.migrator.orphaned)}
Mapping Diff: {self.migrator.mapping_diff}
''')
            finally:
                # commit even if the migration raised so the txn is released
                await tm.commit(self.request)

    def run(self, arguments, settings, app):
        """Entry point: run all migrations, cancelling cleanly on interrupt."""
        loop = self.get_loop()
        try:
            loop.run_until_complete(self.migrate_all(arguments))
        except KeyboardInterrupt:  # pragma: no cover
            pass
        finally:
            # BUG FIX: migrate_all may be interrupted before a Migrator is
            # ever created; the original dereferenced self.migrator
            # unconditionally and raised AttributeError on None.
            if self.migrator is not None and self.migrator.status != 'done':
                loop.run_until_complete(self.migrator.cancel_migration())
async def test_updates_index_data(es_requester):
    """index_object should batch an 'index' action for full indexing and an
    'update' action (carrying only the diffed fields) for partial indexing."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)

        migrator = Migrator(search, container, force=True, request=request)
        version, new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name
        await search.install_mappings_on_index(new_index_name)

        # full indexing queues an 'index' action in the batch
        ob = create_content()
        ob.title = 'foobar'
        await migrator.index_object(ob, full=True)
        assert len(migrator.batch) == 1
        assert [v for v in migrator.batch.values()][0]['action'] == 'index'

        await migrator.flush()
        assert len(migrator.batch) == 0
        await migrator.join_futures()
        await asyncio.sleep(1)
        await search.refresh(container, new_index_name)
        await asyncio.sleep(1)
        assert await search.get_doc_count(container, new_index_name) == 1

        # test updating doc
        migrator.mapping_diff = {'Item': {'title': {}}}
        ob.title = 'foobar-new'
        await migrator.index_object(ob, full=False)
        assert len(migrator.batch) == 1
        assert [v for v in migrator.batch.values()][0]['action'] == 'update'

        # the partial update carries only the diffed fields
        assert len([v for v in migrator.batch.values()][0]['data']) == 2
        assert [v for v in migrator.batch.values()
                ][0]['data']['title'] == 'foobar-new'

        await migrator.flush()
        assert len(migrator.batch) == 0
        await migrator.join_futures()
        await asyncio.sleep(1)
        await search.refresh(container, new_index_name)
        await asyncio.sleep(1)
        doc = await search.conn.get(index=new_index_name,
                                    doc_type=DOC_TYPE,
                                    id=ob._p_oid)
        assert doc['_source']['title'] == 'foobar-new'
async def test_updates_index_data(es_requester):
    """index_object should batch 'index' for full indexing and 'update' with
    only the diffed fields for partial indexing (get_connection variant)."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)

        migrator = Migrator(search, container, force=True)
        new_index_name = await migrator.create_next_index()
        migrator.work_index_name = new_index_name

        # full indexing queues an 'index' action in the batch
        ob = create_content()
        ob.title = "foobar"
        await migrator.index_object(ob, full=True)
        assert len(migrator.batch) == 1
        assert [v for v in migrator.batch.values()][0]["action"] == "index"

        await migrator.flush()
        assert len(migrator.batch) == 0
        await migrator.join_futures()
        await asyncio.sleep(1)
        await search.refresh(container, new_index_name)
        await asyncio.sleep(1)
        assert await search.get_doc_count(container, new_index_name) == 1

        # test updating doc
        migrator.mapping_diff = {"title": {}}
        ob.title = "foobar-new"
        await migrator.index_object(ob, full=False)
        assert len(migrator.batch) == 1
        assert [v for v in migrator.batch.values()][0]["action"] == "update"

        # the partial update carries only the diffed fields
        assert len([v for v in migrator.batch.values()][0]["data"]) == 2
        assert [v for v in migrator.batch.values()
                ][0]["data"]["title"] == "foobar-new"  # noqa

        await migrator.flush()
        assert len(migrator.batch) == 0
        await migrator.join_futures()
        await asyncio.sleep(1)
        await search.refresh(container, new_index_name)
        await asyncio.sleep(1)
        doc = await search.get_connection().get(index=new_index_name,
                                                doc_type=DOC_TYPE,
                                                id=ob.__uuid__)
        assert doc["_source"]["title"] == "foobar-new"
Example #19
async def test_updates_index_name(es_requester):
    """After a migration the old physical index is gone and the alias points
    at the migrator's work index."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        existing_index = await search.get_real_index_name(container)
        assert await search.conn.indices.exists(existing_index)
        migrator = Migrator(search, container, force=True, request=request)
        await migrator.run_migration()
        assert not await search.conn.indices.exists(existing_index)
        # BUG FIX: `exists` is a coroutine; without `await` the bare coroutine
        # object is always truthy, so this assertion could never fail.
        assert await search.conn.indices.exists(migrator.work_index_name)
        assert await search.get_real_index_name(container
                                                ) == migrator.work_index_name
async def test_migrate_content_index_works(es_requester):
    """Migration should also migrate per-content sub-indexes: version-2
    indexes are created, version-1 indexes removed, doc counts preserved."""
    async with es_requester as requester:
        add_count = await add_content(requester)
        cresp, _ = await requester(
            "POST",
            "/db/guillotina/",
            data=json.dumps({
                "@type": "UniqueIndexContent",
                "id": "foobar"
            }),
        )
        await requester(
            "POST",
            "/db/guillotina/foobar",
            data=json.dumps({"@type": "IndexItemContent"}),
        )

        container, request, txn, tm = await setup_txn_on_container(requester)

        search = get_utility(ICatalogUtility)
        await search.refresh(container)
        await asyncio.sleep(3)

        # +1 accounts for the UniqueIndexContent object itself
        assert (add_count + 1) == await search.get_doc_count(
            container, "guillotina-db-guillotina_1")
        assert (await search.get_doc_count(
            container,
            "1_guillotina-db-guillotina__uniqueindexcontent-{}".format(  # noqa
                get_short_uid(cresp["@uid"])),
        ) == 1)

        migrator = Migrator(search, container, force=True)
        await migrator.run_migration()

        # version-2 indexes exist, version-1 indexes are gone
        assert await search.get_connection().indices.exists(
            "guillotina-db-guillotina_2")
        assert not await search.get_connection().indices.exists(
            "guillotina-db-guillotina_1")
        assert await search.get_connection().indices.exists(
            "2_guillotina-db-guillotina__uniqueindexcontent-{}".format(
                get_short_uid(cresp["@uid"])))
        assert not await search.get_connection().indices.exists(
            "1_guillotina-db-guillotina__uniqueindexcontent-{}".format(
                get_short_uid(cresp["@uid"])))

        # doc counts carried over to the new indexes
        assert (add_count + 1) == await search.get_doc_count(
            container, "guillotina-db-guillotina_2")
        assert (await search.get_doc_count(
            container,
            "2_guillotina-db-guillotina__uniqueindexcontent-{}".format(  # noqa
                get_short_uid(cresp["@uid"])),
        ) == 1)
async def test_updates_index_name(es_requester):
    """Running a migration should retire the old index and repoint the alias
    at the freshly built work index."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)
        index_manager = get_adapter(container, IIndexManager)
        old_index = await index_manager.get_real_index_name()
        # the current physical index exists before we migrate
        assert await search.get_connection().indices.exists(old_index)

        migrator = Migrator(search, container, force=True)
        await migrator.run_migration()

        # old index removed, work index present, alias repointed
        assert not await search.get_connection().indices.exists(old_index)
        assert await search.get_connection().indices.exists(
            migrator.work_index_name)
        assert await index_manager.get_real_index_name() == migrator.work_index_name
async def test_calculate_mapping_diff(es_requester):
    """calculate_mapping_diff should flag the fields changed on the work
    index ('creators' type change + added 'foobar')."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)

        index_manager = get_adapter(container, IIndexManager)
        migrator = Migrator(search, container, force=True)
        new_index_name = await index_manager.start_migration()
        migrator.work_index_name = new_index_name

        mappings = await index_manager.get_mappings()

        # tweak mappings so we can get the diff...
        if "creators" in mappings["properties"]:
            mappings["properties"]["creators"]["type"] = "text"
        mappings["properties"]["foobar"] = {"type": "keyword", "index": True}

        await search.create_index(new_index_name,
                                  index_manager,
                                  mappings=mappings)

        diff = await migrator.calculate_mapping_diff()
        # two differences: changed 'creators' + new 'foobar'
        assert len(diff) == 2
async def test_calculate_mapping_diff(es_requester):
    """calculate_mapping_diff should flag the fields changed on the work
    index (request-based variant of the test above in the original suite)."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = get_utility(ICatalogUtility)

        index_manager = get_adapter(container, IIndexManager)
        migrator = Migrator(search, container, force=True, request=request)
        new_index_name = await index_manager.start_migration()
        migrator.work_index_name = new_index_name

        mappings = await index_manager.get_mappings()

        # tweak mappings so we can get the diff...
        if 'creators' in mappings['properties']:
            mappings['properties']['creators']['type'] = 'text'
        mappings['properties']['foobar'] = {'type': 'keyword', 'index': True}

        await search.create_index(new_index_name,
                                  index_manager,
                                  mappings=mappings)

        diff = await migrator.calculate_mapping_diff()
        # two differences: changed 'creators' + new 'foobar'
        assert len(diff) == 2
Example #24
 def __init__(self, txn, tm, request, container, last_tid=-2):
     """Collect the state needed to reconcile the DB with the ES index.

     :param txn: active transaction
     :param tm: transaction manager
     :param request: current request
     :param container: container whose index is being checked
     :param last_tid: transaction id to resume from (-2 means no prior tid)
     """
     self.txn = txn
     self.tm = tm
     self.request = request
     self.container = container
     # discrepancy buckets filled in while scanning
     self.orphaned = []
     self.missing = []
     self.utility = getUtility(ICatalogUtility)
     self.migrator = Migrator(
         self.utility, self.container, full=True, bulk_size=10)
     self.cache = LRU(200)
     self.last_tid = last_tid
     self.use_tid_query = True
     self.last_zoid = None
     # for state tracking so we get boundries right
     self.last_result_set = []
async def test_fixes_missing(es_requester):
    """A migration should restore docs missing from the index: the repaired
    docs land in the new work index and the alias moves off the old index."""
    async with es_requester as requester:
        await add_content(requester, 2, 2)
        container, request, txn, tm = await setup_txn_on_container(requester)

        search = get_utility(ICatalogUtility)
        await asyncio.sleep(1)
        await search.refresh(container)
        await asyncio.sleep(1)
        original_count = await search.get_doc_count(container)

        # remove a random object's doc from the index to simulate a miss
        keys = await container.async_keys()
        key = random.choice(keys)
        ob = await container.async_get(key)
        await search.remove(container, [(ob)], request=request)

        await asyncio.sleep(1)
        await search.refresh(container)
        await asyncio.sleep(1)
        assert original_count != await search.get_doc_count(container)
        im = get_adapter(container, IIndexManager)
        old_index_name = await im.get_real_index_name()

        responses = []

        # minimal sink to capture the migrator's progress output
        class Writer:
            def write(self, item):
                responses.append(item)

        migrator = Migrator(search,
                            container,
                            force=True,
                            request=request,
                            response=Writer())
        await migrator.run_migration()

        assert migrator.status == 'done'

        await asyncio.sleep(1)
        await search.refresh(container)
        await asyncio.sleep(1)
        # new index should fix missing one, old index still has it missing
        num_docs = await search.get_doc_count(container,
                                              migrator.work_index_name)
        assert num_docs == original_count
        assert old_index_name != await im.get_real_index_name()
async def test_migrate_content_index_works(es_requester):
    """Forced migration rolls both the container index and the per-content
    unique index from generation 1 to generation 2, preserving doc counts."""
    async with es_requester as requester:
        created = await add_content(requester)
        cresp, _ = await requester(
            'POST', '/db/guillotina/',
            data=json.dumps({'@type': 'UniqueIndexContent', 'id': 'foobar'}))
        await requester(
            'POST', '/db/guillotina/foobar',
            data=json.dumps({'@type': 'IndexItemContent'}))

        container, request, txn, tm = await setup_txn_on_container(requester)

        catalog = get_utility(ICatalogUtility)
        await catalog.refresh(container)
        await asyncio.sleep(3)

        # compute the generation-specific unique-index names once
        short_uid = get_short_oid(cresp['@uid'])
        unique_v1 = '1_guillotina-db-guillotina__uniqueindexcontent-{}'.format(short_uid)  # noqa
        unique_v2 = '2_guillotina-db-guillotina__uniqueindexcontent-{}'.format(short_uid)  # noqa

        # +1 accounts for the UniqueIndexContent object itself
        assert (created + 1) == await catalog.get_doc_count(
            container, 'guillotina-db-guillotina_1')
        assert await catalog.get_doc_count(container, unique_v1) == 1

        await Migrator(catalog, container, force=True).run_migration()

        # generation 2 indexes exist; generation 1 indexes are gone
        assert await catalog.conn.indices.exists('guillotina-db-guillotina_2')
        assert not await catalog.conn.indices.exists(
            'guillotina-db-guillotina_1')
        assert await catalog.conn.indices.exists(unique_v2)
        assert not await catalog.conn.indices.exists(unique_v1)

        # counts carried over to the new generation
        assert (created + 1) == await catalog.get_doc_count(
            container, 'guillotina-db-guillotina_2')
        assert await catalog.get_doc_count(container, unique_v2) == 1
# ----- Example #27 -----
 def __init__(self, txn, tm, request, container, last_tid=-2):
     """Set up state for checking *container*'s catalog index.

     :param txn: active transaction
     :param tm: transaction manager owning *txn*
     :param request: current request, passed through to the migrator
     :param container: container whose index is being checked
     :param last_tid: transaction-id marker to start scanning from
         (default -2)
     """
     self.txn = txn
     self.tm = tm
     self.request = request
     self.container = container
     # result buckets filled in during the scan
     # NOTE(review): semantics inferred from the names -- confirm against
     # the methods that populate them
     self.orphaned = set()
     self.missing = set()
     self.out_of_date = set()
     self.utility = get_utility(ICatalogUtility)
     # full migration in small batches; lookup_index routes each doc to
     # its proper index
     self.migrator = Migrator(
         self.utility, self.container, full=True, bulk_size=10,
         lookup_index=True)
     self.index_manager = get_adapter(self.container, IIndexManager)
     self.cache = LRU(200)
     self.last_tid = last_tid
     self.use_tid_query = True
     self.last_zoid = None
     # for state tracking so we get boundries right
     self.last_result_set = []
# ----- Example #28 -----
async def test_migrate_get_all_uids(es_requester):
    """get_all_uids() yields exactly one uid per indexed document."""
    async with es_requester as requester:
        await add_content(requester)

        container, request, txn, tm = await setup_txn_on_container(requester)

        catalog = getUtility(ICatalogUtility)
        # let ES settle around the refresh before counting
        await asyncio.sleep(1)
        await catalog.refresh(container)
        await asyncio.sleep(1)

        indexed_total = await catalog.get_doc_count(container)

        uid_list = await Migrator(
            catalog, container, force=True).get_all_uids()

        assert len(uid_list) == indexed_total

        await tm.abort(txn=txn)
async def test_search_works_on_updated_docs_during_migration_when_present(
    es_requester, ):  # noqa
    """While a migration is in flight, a document update must land in
    BOTH the next (work) index and the current live index."""
    async with es_requester as requester:
        container, request, txn, tm = await setup_txn_on_container(requester)
        catalog = get_utility(ICatalogUtility)
        migrator = Migrator(catalog, container, force=True)
        index_manager = get_adapter(container, IIndexManager)
        live_index = await index_manager.get_index_name()
        future_index = await migrator.setup_next_index()

        created, _ = await requester(
            "POST",
            "/db/guillotina",
            data=json.dumps({
                "@type": "Item",
                "title": "Foobar1",
                "id": "foobar"
            }),
        )
        await asyncio.sleep(1)
        await requester(
            "PATCH",
            "/db/guillotina/foobar",
            data=json.dumps({"title": "Foobar2"}),
        )

        async def _check():
            # the PATCHed title must be visible in both index generations
            for idx in (future_index, live_index):
                doc = await catalog.get_connection().get(
                    index=idx, doc_type="_all", id=created["@uid"])
                assert doc is not None
                assert doc["_source"]["title"] == "Foobar2"

        await run_with_retries(_check, requester)
# ----- Example #30 -----
async def test_unindex_during_next_index(es_requester):
    async with es_requester as requester:
        await add_content(requester, 2)
        container, request, txn, tm = await setup_txn_on_container(requester)
        search = getUtility(ICatalogUtility)
        migrator = Migrator(search, container, force=True, request=request)
        next_index_version, work_index_name = await migrator.create_next_index(
        )
        await search.install_mappings_on_index(work_index_name)
        await search.activate_next_index(container,
                                         next_index_version,
                                         request=request)
        await tm.commit(txn=txn)
        container, request, txn, tm = await setup_txn_on_container(requester)
        keys = await container.async_keys()
        item = await container.async_get(keys[0])
        aiotask_context.set('request', request)
        await notify(ObjectRemovedEvent(item, container, item.id))
        request.execute_futures()
        await asyncio.sleep(1)