Example #1
    async def reindex(self, obj):
        index_manager = find_index_manager(obj)
        if index_manager is None:
            index_manager = get_adapter(self.request.container, IIndexManager)
        self.work_index_name = await index_manager.get_index_name()

        await notify(IndexProgress(
            self.request, self.context, 0, self.processed))
        await self.process_object(obj)
        await self.flush()
        if len(self.sub_indexes) > 0:
                # reindexing this object may mean its sub indexes need to be run through as well.
            for ob in self.sub_indexes:
                im = get_adapter(ob, IIndexManager)
                reindexer = Reindexer(
                    self.utility, ob, response=self.response, force=self.force,
                    log_details=self.log_details,
                    memory_tracking=self.memory_tracking,
                    request=self.request, bulk_size=self.bulk_size,
                    full=self.full, reindex_security=self.reindex_security,
                    mapping_only=self.mapping_only, index_manager=im)
                reindexer.processed = self.processed
                reindexer.work_index_name = await im.get_index_name()
                await reindexer.process_folder(ob)
                await reindexer.flush()
                self.processed = reindexer.processed

        await notify(IndexProgress(
            self.request, self.context, self.processed,
            self.processed, completed=True
        ))
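
Example #1 leans on the find_index_manager() helper to decide whether an object carries its own sub index. That helper is not shown on this page; below is a minimal sketch of what it could look like, assuming an IIndexActive marker interface flags content that owns a dedicated index (the names mirror the ones used in these snippets, but the body is illustrative, not the library's actual implementation):

from guillotina.component import get_adapter
# import path assumed; these interfaces appear in the snippets on this page
from guillotina_elasticsearch.interfaces import IIndexActive, IIndexManager

def find_index_manager(content):
    # walk up the content tree looking for an object marked as
    # owning its own (sub) index
    ob = content
    while ob is not None:
        if IIndexActive.providedBy(ob):
            # hand back the adapter responsible for that index
            return get_adapter(ob, IIndexManager)
        ob = getattr(ob, '__parent__', None)
    # nothing found; callers fall back to the container-wide index
    return None
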
Example #2
    async def index_object(self, ob, full=False, lookup_index=False):
        batch_type = 'update'
        if self.reindex_security:
            data = ISecurityInfo(ob)()
        elif full or self.full:
            data = await ICatalogDataAdapter(ob)()
            batch_type = 'index'
        else:
            data = {
                # always need these...
                'type_name': ob.type_name
            }
            for index_name in self.mapping_diff.keys():
                val = await self.indexer.get_value(ob, index_name)
                if val is not None:
                    data[index_name] = val

        if ob._p_serial:
            data['tid'] = ob._p_serial
        self.indexed += 1
        self.batch[ob.uuid] = {'action': batch_type, 'data': data}

        if lookup_index:
            im = find_index_manager(ob)
            if im:
                self.batch[ob.uuid]['__index__'] = await im.get_index_name()

        if self.log_details:
            self.response.write(
                f'({self.processed} {int(self.per_sec())}) '
                f'Object: {get_content_path(ob)}, '
                f'Type: {batch_type}, Buffer: {len(self.batch)}')

        await self.attempt_flush()
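
The buffering in Example #2 hinges on attempt_flush(): actions pile up in self.batch and are only shipped to Elasticsearch once the buffer reaches bulk_size. The method itself is not shown here; a rough sketch of the likely logic on the same class, with the actual bulk request left as a hypothetical helper:

    async def attempt_flush(self):
        # only flush once enough actions are buffered, keeping the
        # number of bulk requests to Elasticsearch low
        if len(self.batch) >= self.bulk_size:
            await self.flush()

    async def flush(self):
        if not self.batch:
            return
        # swap the buffer out before awaiting so new actions land
        # in a fresh batch
        batch, self.batch = self.batch, {}
        await self._send_bulk(batch)  # hypothetical helper issuing the request
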
Example #3
    async def get_data(self, content, indexes=None):
        # attempt to find index manager on parent of object we're
        # indexing and mark the object with the indexes we want
        # to store it in
        im = find_index_manager(content)
        if im is not None:
            data = await super().get_data(content, indexes, await im.get_schemas())
            data['__indexes__'] = await im.get_indexes()
        else:
            data = await super().get_data(content, indexes)
        return data
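
The '__indexes__' key written here marks the serialized document with every index it should be stored in. A hypothetical consumer of get_data() could use that marker to route bulk actions, along these lines (route_document, default_index, and the action shape are illustrative, not taken from the library):

async def route_document(adapter, content, default_index):
    data = await adapter.get_data(content)
    actions = []
    # send the document to every index the object was marked with,
    # or to the container's default index when no sub index applies
    for index_name in data.pop('__indexes__', None) or [default_index]:
        actions.append({'index': {'_index': index_name, '_id': content.uuid}})
        actions.append(data)
    return actions
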
Example #4
    async def index_object(self, ob, full=False):
        batch_type = "update"
        if self.reindex_security:
            try:
                data = await apply_coroutine(ISecurityInfo(ob))
            except TypeError:
                self.response.write(f"Could not index {ob}")
                return
        elif full or self.full:
            try:
                data = await ICatalogDataAdapter(ob)()
            except TypeError:
                self.response.write(f"Could not index {ob}")
                return
            batch_type = "index"
        else:
            data = {
                # always need these...
                "type_name": ob.type_name
            }
            for index_name in self.mapping_diff.keys():
                val = await self.indexer.get_value(ob, index_name)
                if val is not None:
                    data[index_name] = val

        if ob.__serial__:
            data["tid"] = ob.__serial__
        self.indexed += 1
        self.batch[ob.uuid] = {"action": batch_type, "data": data}

        if self.lookup_index:
            im = find_index_manager(ob)
            if im:
                self.batch[ob.uuid]["__index__"] = await im.get_index_name()

        if self.log_details:
            self.response.write(
                f"({self.processed} {int(self.per_sec())}) "
                f"Object: {get_content_path(ob)}, "
                f"Type: {batch_type}, Buffer: {len(self.batch)}"
            )

        await self.attempt_flush()
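
Compared with Example #2, this version wraps the adapter calls in try/except TypeError, so objects that cannot be adapted are logged and skipped instead of aborting the run, and it calls the ISecurityInfo adapter through apply_coroutine so the adapter may be either sync or async. Guillotina ships such a helper in guillotina.utils; a sketch of what it typically does (this body is illustrative):

import asyncio

async def apply_coroutine(func, *args, **kwargs):
    # call func; if it handed back a coroutine, await it, so that one
    # code path handles both sync and async adapters
    result = func(*args, **kwargs)
    if asyncio.iscoroutine(result):
        return await result
    return result
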
Example #5
    async def remove(self, container, objects, index_name=None, request=None):
        """List of UIDs to remove from index.

        It will remove all the children on the index"""
        if not self.enabled:
            return

        if len(objects) > 0:
            if index_name is None:
                indexes = await self.get_current_indexes(container)
            else:
                indexes = [index_name]
            bulk_data = []
            for obj in objects:
                item_indexes = indexes
                im = find_index_manager(obj)
                if im:
                    item_indexes = await im.get_indexes()
                for index in item_indexes:
                    bulk_data.append(
                        {"delete": {
                            "_index": index,
                            "_id": obj.__uuid__
                        }})
                if IFolder.providedBy(obj):
                    # only folders need to have children cleaned
                    if IIndexActive.providedBy(obj):
                        # delete this index...
                        im = get_adapter(obj, IIndexManager)
                        await self._delete_index(im)
                    else:
                        await self.unindex_all_children(
                            container, obj, index_name=",".join(item_indexes))
            conn = self.get_connection()
            await conn.bulk(
                index=indexes[0],
                body=bulk_data,
                doc_type=DOC_TYPE,
                refresh=self._refresh(),
            )
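
For reference, the body handed to conn.bulk() above is a flat list of delete actions, one per (object, index) pair. With two objects, the second of which has an index manager reporting a dedicated sub index, bulk_data would look roughly like this (index names and ids are made up):

bulk_data = [
    {"delete": {"_index": "container_index", "_id": "uid-1"}},
    {"delete": {"_index": "sub_index__folder", "_id": "uid-2"}},
]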