Example #1
    def _reindex(ids):
        if not ids:
            return

        # Commit pending database changes before reindexing so the indexer
        # sees them.
        request.tm.commit()
        indexer = index.BatchIndexer(request.db, request.es, request)
        indexer.index(ids)
Example #2
def _reindex_annotations(request, ids):
    indexer = index.BatchIndexer(request.db, request.es, request)

    # index() returns the IDs of annotations that failed to index;
    # retry those once before giving up.
    for _ in range(2):
        ids = indexer.index(ids)
        if not ids:
            break
Example #3
    def test_index_returns_failed_bulk_actions_for_create_op_type(
            self, pyramid_request, es, db_session, streaming_bulk, factories):
        indexer = index.BatchIndexer(db_session,
                                     es,
                                     pyramid_request,
                                     op_type='create')

        ann_success_1, ann_success_2 = factories.Annotation(
        ), factories.Annotation()
        ann_fail_1, ann_fail_2 = factories.Annotation(), factories.Annotation()

        def fake_streaming_bulk(*args, **kwargs):
            # Pretend the bulk request failed for the two "fail" annotations
            # and succeeded for the two "success" annotations.
            for ann in args[1]:
                if ann.id in [ann_fail_1.id, ann_fail_2.id]:
                    yield (False, {
                        'create': {
                            '_id': ann.id,
                            'error': 'unknown error'
                        }
                    })
                elif ann.id in [ann_success_1.id, ann_success_2.id]:
                    yield (True, {'create': {'_id': ann.id}})

        streaming_bulk.side_effect = fake_streaming_bulk

        result = indexer.index()
        assert result == {ann_fail_1.id, ann_fail_2.id}
Example #4
    def test_index_allows_to_set_op_type(self, db_session, es, pyramid_request,
                                         streaming_bulk, factories):
        indexer = index.BatchIndexer(db_session,
                                     es,
                                     pyramid_request,
                                     op_type='create')
        annotation = factories.Annotation()
        db_session.add(annotation)
        db_session.flush()
        results = []

        def fake_streaming_bulk(*args, **kwargs):
            # Capture the (action, document) pair produced by the
            # expand_action_callback for the single annotation.
            ann = list(args[1])[0]
            callback = kwargs.get('expand_action_callback')
            results.append(callback(ann))
            return set()

        streaming_bulk.side_effect = fake_streaming_bulk

        indexer.index()

        rendered = presenters.AnnotationSearchIndexPresenter(
            annotation).asdict()
        rendered['target'][0]['scope'] = [annotation.target_uri_normalized]
        assert results[0] == ({
            'create': {
                '_type': indexer.es_client.t.annotation,
                '_index': 'hypothesis',
                '_id': annotation.id
            }
        }, rendered)
Example #5
    def test_index_ignores_document_exists_errors_for_op_type_create(
            self, db_session, es, pyramid_request, streaming_bulk, factories):
        indexer = index.BatchIndexer(db_session, es, pyramid_request,
                                     op_type='create')

        ann_success_1, ann_success_2 = factories.Annotation(), factories.Annotation()
        ann_fail_1, ann_fail_2 = factories.Annotation(), factories.Annotation()

        def fake_streaming_bulk(*args, **kwargs):
            # Simulate "document already exists" failures, which the indexer
            # should ignore when op_type is 'create'.
            for ann in args[1]:
                if ann.id in [ann_fail_1.id, ann_fail_2.id]:
                    error = 'DocumentAlreadyExistsException[[index-name][1] [annotation][gibberish]: ' \
                            'document already exists]'
                    yield (False, {'create': {'_id': ann.id, 'error': error}})
                elif ann.id in [ann_success_1.id, ann_success_2.id]:
                    yield (True, {'create': {'_id': ann.id}})

        streaming_bulk.side_effect = fake_streaming_bulk

        result = indexer.index()
        assert len(result) == 0
Example #6
    def indexer(self, db_session, pyramid_request):
        return index.BatchIndexer(db_session, mock.MagicMock(),
                                  pyramid_request)
Example #7
    def indexer(self, db_session, es, pyramid_request):
        return index.BatchIndexer(db_session, es, pyramid_request)
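
Examples #6 and #7 read like pytest fixture bodies taken from a test class. Below is a minimal sketch of how such fixtures are typically wired up, assuming a class-based pytest suite, that the db_session and pyramid_request fixtures are provided elsewhere (e.g. in a conftest), and that BatchIndexer is importable from h.search.index; the class name, decorators and import path are assumptions, not shown in the snippets above.

import mock  # or: from unittest import mock
import pytest

from h.search import index  # assumed import path for BatchIndexer


class TestBatchIndexer(object):

    @pytest.fixture
    def indexer(self, db_session, es, pyramid_request):
        # Build the indexer against the test database, the (mocked)
        # Elasticsearch client and the Pyramid request.
        return index.BatchIndexer(db_session, es, pyramid_request)

    @pytest.fixture
    def es(self):
        # Stand-in Elasticsearch client; the tests above patch
        # streaming_bulk instead of talking to a real cluster.
        return mock.MagicMock()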