コード例 #1
0
def es(app):
    """Create the search indices for a test and drop them afterwards."""
    try:
        # Exhaust the generator so every index is actually created.
        for _ in current_search.create():
            pass
    except RequestError:
        # Stale indices from a previous run: wipe them and retry.
        for _ in current_search.delete(ignore=[404]):
            pass
        for _ in current_search.create():
            pass
    yield current_search
    # Teardown: remove everything created for the test.
    for _ in current_search.delete(ignore=[404]):
        pass
コード例 #2
0
def es(app):
    """Elasticsearch fixture."""
    def _drain(results):
        # Force the lazy index operations to run.
        for _ in results:
            pass

    try:
        _drain(current_search.create())
    except RequestError:
        _drain(current_search.delete(ignore=[404]))
        _drain(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    _drain(current_search.delete(ignore=[404]))
コード例 #3
0
def es(app):
    """Provide elasticsearch access."""
    try:
        for _ in current_search.create():
            pass
    except RequestError:
        # Creation failed: drop whatever exists and recreate from scratch.
        for _ in current_search.delete(ignore=[400, 404]):
            pass
        for _ in current_search.create():
            pass
    current_search_client.indices.refresh()
    yield current_search_client
    for _ in current_search.delete(ignore=[404]):
        pass
コード例 #4
0
ファイル: conftest.py プロジェクト: lnielsen/zenodo
def es(app):
    """Provide elasticsearch access."""
    def _run(ops):
        # Index operations are lazy generators; consume them fully.
        for _ in ops:
            pass

    try:
        _run(current_search.create())
    except RequestError:
        _run(current_search.delete(ignore=[400, 404]))
        _run(current_search.create())
    current_search_client.indices.refresh()
    yield current_search_client
    _run(current_search.delete(ignore=[404]))
コード例 #5
0
ファイル: conftest.py プロジェクト: drjova/invenio-deposit
def es(app):
    """Elasticsearch fixture."""
    try:
        for _ in current_search.create():
            pass
    except RequestError:
        # Recover from pre-existing indices: delete, then recreate
        # (tolerating indices that already exist).
        for _ in current_search.delete(ignore=[404]):
            pass
        for _ in current_search.create(ignore=[400]):
            pass
    current_search_client.indices.refresh()
    yield current_search_client
    for _ in current_search.delete(ignore=[404]):
        pass
コード例 #6
0
def es(app):
    """Provide elasticsearch access with automatic indexing disabled."""
    app.config.update(SEARCH_AUTOINDEX=[])
    InvenioSearch(app)
    with app.app_context():
        try:
            for _ in current_search.create():
                pass
        except RequestError:
            # Indices left over from an earlier run: rebuild them.
            for _ in current_search.delete(ignore=[404]):
                pass
            for _ in current_search.create():
                pass
        yield current_search
        for _ in current_search.delete(ignore=[404]):
            pass
コード例 #7
0
def prefixed_es(app):
    """Elasticsearch fixture using a ``test-`` index prefix.

    Fix: the teardown used to reset ``SEARCH_INDEX_PREFIX`` to ``''``
    unconditionally, clobbering any prefix the application had before
    the fixture ran.  The previous value is now saved and restored.
    """
    previous_prefix = app.config.get('SEARCH_INDEX_PREFIX', '')
    app.config['SEARCH_INDEX_PREFIX'] = 'test-'
    try:
        list(current_search.create())
    except (RequestError, IndexAlreadyExistsError):
        # Recreate the indices if they already existed or were broken.
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    list(current_search.delete(ignore=[404]))
    app.config['SEARCH_INDEX_PREFIX'] = previous_prefix
コード例 #8
0
def es(app):
    """Provide elasticsearch access (auto-indexing disabled)."""
    app.config.update(dict(SEARCH_AUTOINDEX=[]))
    InvenioSearch(app)
    with app.app_context():
        def _drain(ops):
            # Consume the lazy generator so the operation executes.
            for _ in ops:
                pass

        try:
            _drain(current_search.create())
        except RequestError:
            _drain(current_search.delete(ignore=[404]))
            _drain(current_search.create())
        yield current_search
        _drain(current_search.delete(ignore=[404]))
コード例 #9
0
def es(app):
    """Provide elasticsearch access and declare the indexer queue."""
    InvenioSearch(app)
    try:
        for _ in current_search.create():
            pass
    except RequestError:
        for _ in current_search.delete(ignore=[404]):
            pass
        for _ in current_search.create(ignore=[400]):
            pass
    current_search_client.indices.refresh()
    # Declare the indexer message queue so bulk indexing tasks can run.
    mq_queue = app.config['INDEXER_MQ_QUEUE']
    with establish_connection() as conn:
        mq_queue(conn).declare()
    yield current_search_client
    for _ in current_search.delete(ignore=[404]):
        pass
コード例 #10
0
def test_percolator(app, request):
    """Test percolator."""
    def _teardown():
        # Detach collection signals and drop all indices after the test.
        with app.app_context():
            current_collections.unregister_signals()
            for _ in current_search.delete():
                pass

    request.addfinalizer(_teardown)

    with app.test_request_context():
        app.config.update(dict(
            COLLECTIONS_USE_PERCOLATOR=True,
            SEARCH_ELASTIC_KEYWORD_MAPPING={None: ['_all']},
        ))

        search = InvenioSearch(app)
        search.register_mappings('records', 'data')

        InvenioIndexer(app)

        # Re-register signals from a known-clean state.
        current_collections.unregister_signals()
        current_collections.register_signals()

        for _ in current_search.create():
            pass

        _try_populate_collections()
コード例 #11
0
def simple_record(app):
    """Yield a workflow payload for a minimal HEP record."""
    def _hep_record():
        # Build a fresh dict on each call so 'data' and the source-data
        # snapshot remain two independent (but equal) objects, exactly
        # as with the original duplicated literals.
        return {
            '$schema': 'http://localhost:5000/schemas/records/hep.json',
            '_collections': ['Literature'],
            'document_type': ['article'],
            'titles': [{'title': 'Superconductivity'}],
            'acquisition_source': {'source': 'arXiv'},
            'dois': [{'value': '10.3847/2041-8213/aa9110'}],
        }

    yield {
        'data': _hep_record(),
        'extra_data': {
            'source_data': {
                'data': _hep_record(),
                'extra_data': {},
            },
        },
    }

    # Teardown: rebuild the holdingpen index used by the workflow.
    list(current_search.delete(index_list='holdingpen-hep'))
    list(current_search.create(ignore=[400], ignore_existing=True))
コード例 #12
0
def app():
    """Flask application.
    Creates a Flask application with a simple testing configuration,
    then creates an application context and yields, so that all tests
    have access to one.
    See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    import os  # local import keeps this fixture self-contained

    app = create_app(
        DEBUG=False,
        # Tests may fail when turned on because of Flask bug (A setup function was called after the first request
        # was handled. when initializing - when Alembic initialization)
        WTF_CSRF_ENABLED=False,
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND='cache',
        CELERY_CACHE_BACKEND='memory',
        CELERY_TASK_EAGER_PROPAGATES=True,
        TESTING=True,
        PRODUCTION_MODE=True,
        # Allow overriding the database URI from the environment (same
        # pattern as the other app fixtures); the default is unchanged.
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI',
            "postgresql+psycopg2://scoap3:dbpass123@localhost:5432/scoap3"))

    with app.app_context():
        # Rebuild the schema and search indices from scratch.
        db.session.close()
        db.drop_all()
        db.create_all()

        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
        _create_files_location()

        current_search.flush_and_refresh('*')

        yield app
コード例 #13
0
ファイル: conftest.py プロジェクト: anukat2015/zenodo
def app(request):
    """Flask application fixture."""
    app = create_app(
        CELERY_ALWAYS_EAGER=True,
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        CELERY_RESULT_BACKEND="cache",
        DEBUG_TB_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT="CHANGE_ME",
        MAIL_SUPPRESS_SEND=True,
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db'),
        TESTING=True,
    )

    with app.app_context():
        # Create the database (if needed) and rebuild schema + indices.
        db_url = str(db.engine.url)
        if not database_exists(db_url):
            create_database(db_url)
        db.drop_all()
        db.create_all()
        for _ in current_search.create():
            pass

    def _teardown():
        with app.app_context():
            drop_database(str(db.engine.url))
            for _ in current_search.delete(ignore=[404]):
                pass

    request.addfinalizer(_teardown)

    return app
コード例 #14
0
def test_percolator(app, request):
    """Test percolator."""
    def _cleanup():
        # Finalizer: detach signals and drop every search index.
        with app.app_context():
            current_collections.unregister_signals()
            for _ in current_search.delete():
                pass

    request.addfinalizer(_cleanup)

    with app.test_request_context():
        app.config.update(dict(
            COLLECTIONS_USE_PERCOLATOR=True,
            SEARCH_ELASTIC_KEYWORD_MAPPING={None: ['_all']},
        ))

        search = InvenioSearch(app)
        search.register_mappings('records', 'data')

        InvenioIndexer(app)

        # Ensure signals are registered exactly once.
        current_collections.unregister_signals()
        current_collections.register_signals()

        for _ in current_search.create():
            pass

        _try_populate_collections()
コード例 #15
0
def test_whitelisted_aliases(app, aliases_config, expected_aliases):
    """Test functionality of active aliases configuration variable."""

    saved_mappings = app.config['SEARCH_MAPPINGS']

    search = app.extensions['invenio-search']
    for mapping_pkg in ('records', 'authors'):
        search.register_mappings(mapping_pkg, 'mock_module.mappings')

    app.config.update(SEARCH_MAPPINGS=aliases_config)

    with app.app_context():
        # Start from a clean cluster: no aliases, no indices.
        current_search_client.indices.delete_alias(
            '_all', '_all', ignore=[400, 404])
        current_search_client.indices.delete('*')
        list(current_search.create(ignore=None))

        aliases = current_search_client.indices.get_alias()
        if expected_aliases == []:
            assert len(aliases) == 0
        else:
            assert current_search_client.indices.exists(expected_aliases)

    # Restore the original mapping whitelist for other tests.
    app.config['SEARCH_MAPPINGS'] = saved_mappings
コード例 #16
0
ファイル: conftest.py プロジェクト: duncanwp/zenodo
def app(request):
    """Flask application fixture."""
    app = create_app(
        CELERY_ALWAYS_EAGER=True,
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        CELERY_RESULT_BACKEND="cache",
        DEBUG_TB_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT="CHANGE_ME",
        MAIL_SUPPRESS_SEND=True,
        SQLALCHEMY_DATABASE_URI=os.environ.get('SQLALCHEMY_DATABASE_URI',
                                               'sqlite:///test.db'),
        TESTING=True,
    )

    with app.app_context():
        engine_url = str(db.engine.url)
        if not database_exists(engine_url):
            create_database(engine_url)
        # Rebuild schema and search indices from a clean slate.
        db.drop_all()
        db.create_all()
        for _ in current_search.create():
            pass

    def _finalize():
        with app.app_context():
            drop_database(str(db.engine.url))
            for _ in current_search.delete(ignore=[404]):
                pass

    request.addfinalizer(_finalize)

    return app
コード例 #17
0
ファイル: cli.py プロジェクト: tsgit/inspirehep
def remap_indexes(ctx, yes_i_know, indexes, ignore_checks):
    """Delete and recreate the selected ES indexes (destructive remap).

    Exits with status 1 when no indexes are given, when an unknown index
    is requested, or when fewer indexes were deleted than requested
    (the latter two checks can be bypassed with ``ignore_checks``).
    """
    if not yes_i_know:
        click.confirm(
            "This operation will irreversibly remove data from selected indexes in ES, do you want to continue?",
            abort=True,
        )
    if not indexes:
        click.echo("You should specify indexes which you want to remap")
        click.echo(
            f"Available indexes are: {', '.join(current_search.mappings.keys())}"
        )
        ctx.exit(1)
    wrong_indexes = list(set(indexes) - set(current_search.mappings.keys()))
    if not ignore_checks and len(wrong_indexes) > 0:
        click.echo(f"Indexes {', '.join(wrong_indexes)} not recognized.")
        click.echo(
            f"Available indexes are: {', '.join(current_search.mappings.keys())}"
        )
        ctx.exit(1)

    click.echo(f"Deleting indexes: {', '.join(indexes)}")

    deleted_indexes = list(current_search.delete(index_list=indexes))
    if not ignore_checks and len(deleted_indexes) != len(indexes):
        # Fix: the counts' parentheses in this message were unbalanced.
        click.echo(
            f"Number of deleted indexes ({len(deleted_indexes)}) is different than requested ones ({len(indexes)})",
            err=True,
        )
        click.echo("deleted indexes %s" % [i[0] for i in deleted_indexes])
        ctx.exit(1)

    created_indexes = list(
        current_search.create(ignore_existing=True, index_list=indexes))
    click.echo("remapped indexes %s" % [i[0] for i in created_indexes])
コード例 #18
0
def test_whitelisted_aliases(app, aliases_config, expected_aliases):
    """Test functionality of active aliases configuration variable."""
    # Map each whitelisted entry to the full set of aliases it implies.
    all_aliases = {
        'authors': ['authors', 'authors-authors-v1.0.0'],
        'records': [
            'records',
            'records-default-v1.0.0',
            'records-authorities',
            'records-authorities-authority-v1.0.0',
            'records-bibliographic',
            'records-bibliographic-bibliographic-v1.0.0',
        ],
    }

    saved_mappings = app.config['SEARCH_MAPPINGS']

    search = app.extensions['invenio-search']
    for mapping_pkg in ('records', 'authors'):
        search.register_mappings(mapping_pkg, 'mock_module.mappings')

    app.config.update(SEARCH_MAPPINGS=aliases_config)

    # Wipe every alias and index so only whitelisted ones get created.
    current_search_client.indices.delete_alias(
        '_all', '_all', ignore=[400, 404])
    current_search_client.indices.delete('*')
    list(current_search.create(ignore=None))

    aliases = current_search_client.indices.get_alias()
    if expected_aliases == []:
        assert len(aliases) == 0
    else:
        for expected_alias in expected_aliases:
            assert current_search_client.indices.exists(
                all_aliases[expected_alias])

    app.config['SEARCH_MAPPINGS'] = saved_mappings
コード例 #19
0
ファイル: tasks.py プロジェクト: slint/asclepias-broker
def reindex_all_relationships(destroy: bool = False):
    """Reindex all relationship documents."""
    if destroy:
        # Drop and recreate indices, tolerating absent/existing ones.
        for _ in current_search.delete(ignore=[400, 404]):
            pass
        for _ in current_search.create(ignore=[400, 404]):
            pass
    relationships = GroupRelationship.query.yield_per(1000)
    for batch in chunks(relationships, 1000):
        index_documents(map(build_doc, batch), bulk=True)
コード例 #20
0
def es(app):
    """Elasticsearch fixture.

    Starts from a clean slate by best-effort deleting every existing
    index, creates the registered indices, yields the client and drops
    the indices again on teardown.
    """
    # Remove all indices and data to get to a well-defined state.
    for idx in current_search_client.indices.get('*'):
        try:
            current_search_client.indices.delete(idx)
        except Exception:
            # Fix: this was a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt; cleanup stays best-effort.
            pass

    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    list(current_search.delete(ignore=[404]))
コード例 #21
0
ファイル: conftest.py プロジェクト: pazembrz/inspire-next
def app():
    """
    Deprecated: do not use this fixtures for new tests, unless for very
    specific use cases. Use `isolated_app` instead.

    Flask application with demosite data and without any database isolation:
    any db transaction performed during the tests are persisted into the db.

    Creates a Flask application with a simple testing configuration,
    then creates an application context and inside of it recreates
    all databases and indices from the fixtures. Finally it yields,
    so that all tests that explicitly use the ``app`` fixtures have
    access to an application context.

    See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    app = create_app(
        DEBUG=False,
        # Tests may fail when turned on because of Flask bug (A setup function was called after the first request was handled. when initializing - when Alembic initialization)
        WTF_CSRF_ENABLED=False,
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND='cache',
        CELERY_CACHE_BACKEND='memory',
        CELERY_TASK_EAGER_PROPAGATES=True,
        SECRET_KEY='secret!',
        RECORD_EDITOR_FILE_UPLOAD_FOLDER='tests/integration/editor/temp',
        TESTING=True,
    )
    app.extensions['invenio-search'].register_mappings(
        'records', 'inspirehep.modules.records.mappings')
    # Patch out the citation-update task so demo-record migration does not
    # fan out extra indexing work.
    with app.app_context(), mock.patch(
            'inspirehep.modules.records.receivers.index_modified_citations_from_record.delay'
    ):
        # Celery task imports must be local, otherwise their
        # configuration would use the default pickle serializer.
        from inspirehep.modules.migrator.tasks import migrate_from_file

        # Rebuild the database schema from scratch via Alembic migrations.
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()

        alembic = Alembic(app=current_app)
        alembic.upgrade()

        # Recreate all search indices (missing on delete / existing on
        # create are tolerated).
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))

        init_all_storage_paths()
        init_users_and_permissions()
        init_authentication_token()

        # Load the demosite records synchronously so the data is present
        # before any test runs.
        migrate_from_file('./inspirehep/demosite/data/demo-records.xml.gz',
                          wait_for_results=True)

        current_search.flush_and_refresh(
            'records-hep')  # Makes sure that all HEP records were migrated.

        yield app
コード例 #22
0
def test_creating_alias_existing_index(app, suffix, create_index, create_alias,
                                       expected):
    """Test creating new alias and index where there already exists one."""
    search = app.extensions['invenio-search']
    search.register_mappings('authors', 'mock_module.mappings')
    search._current_suffix = suffix
    # Start from a clean cluster: remove every alias and index.
    current_search_client.indices.delete_alias('_all',
                                               '_all',
                                               ignore=[400, 404])
    current_search_client.indices.delete('*')
    new_indexes = []
    if create_index:
        # Pre-create a plain index that may clash with the one to create.
        current_search_client.indices.create(index=create_index)
        new_indexes.append(create_index)
    if create_alias:
        # Pre-create a suffixed write index and point an alias at it.
        write_alias_index = '{}-suffix'.format(create_alias)
        current_search_client.indices.create(index=write_alias_index)
        new_indexes.append(write_alias_index)
        current_search_client.indices.put_alias(
            index=write_alias_index,
            name=create_alias,
        )
    if expected:
        # Creation is expected to succeed and return exactly these names.
        results = list(current_search.create(ignore=None))
        assert len(results) == len(expected)
        for result in results:
            assert result[0] in expected
        indices = current_search_client.indices.get('*')
        index_names = list(indices.keys())
        alias_names = []
        for index in index_names:
            alias_names.extend(list(indices[index]['aliases'].keys()))
        # Suffixed names must be real indices; everything else an alias.
        for index, _ in results:
            if index.endswith(suffix):
                assert sorted(index_names) == sorted([index] + new_indexes)
            else:
                assert index in alias_names
    else:
        # Creation must fail and leave the pre-created indexes untouched.
        with pytest.raises(Exception):
            results = list(current_search.create(ignore=None))
        indices = current_search_client.indices.get('*')
        index_names = list(indices.keys())
        assert index_names == new_indexes
        if create_index:
            assert len(indices[create_index]['aliases']) == 0
コード例 #23
0
ファイル: cli.py プロジェクト: spirosdelviniotis/inspire-next
def clean_records():
    """Truncate all the records from various tables."""
    from sqlalchemy.engine import reflection
    from invenio_search import current_search

    click.secho('>>> Truncating all records.')

    # Base set of record-related tables; extended below with any table
    # that has a foreign key to records_metadata.
    tables_to_truncate = [
        "records_metadata",
        "records_metadata_version",
        "inspire_prod_records",
        "inspire_orcid_records",
        "pidstore_pid",
    ]
    db.session.begin(subtransactions=True)
    try:
        # Grab any table with foreign keys to records_metadata for truncating
        inspector = reflection.Inspector.from_engine(db.engine)
        for table_name in inspector.get_table_names():
            for foreign_key in inspector.get_foreign_keys(table_name):
                if foreign_key["referred_table"] == "records_metadata":
                    tables_to_truncate.append(table_name)

        # Give the operator a last chance to abort before data loss.
        if not click.confirm("Going to truncate:\n{0}".format(
                "\n".join(tables_to_truncate))):
            return

        click.secho('Truncating tables...', fg='red', bold=True, err=True)
        with click.progressbar(tables_to_truncate) as tables:
            for table in tables:
                db.engine.execute(
                    "TRUNCATE TABLE {0} RESTART IDENTITY CASCADE".format(
                        table))
                click.secho("\tTruncated {0}".format(table))

        db.session.commit()

        # Restrict the alias registry to record indices so only those get
        # destroyed and recreated below.
        # NOTE(review): dict.iteritems() is Python 2-only -- confirm this
        # module still targets Python 2, otherwise this raises at runtime.
        current_search.aliases = {
            k: v
            for k, v in current_search.aliases.iteritems()
            if k.startswith('records')
        }
        click.secho('Destroying indexes...', fg='red', bold=True, err=True)
        with click.progressbar(
                current_search.delete(ignore=[400, 404])) as bar:
            for name, response in bar:
                click.secho(name)

        click.secho('Creating indexes...', fg='green', bold=True, err=True)
        with click.progressbar(current_search.create(ignore=[400])) as bar:
            for name, response in bar:
                click.secho(name)

    except Exception as err:  # noqa
        # Roll back the truncation on any failure and log the cause.
        db.session.rollback()
        current_app.logger.exception(err)
コード例 #24
0
ファイル: conftest.py プロジェクト: lnielsen/invenio-stats
def es(app):
    """Provide elasticsearch access, create and clean indices."""
    indices = current_search_client.indices
    # Start clean: drop every index and index template.
    indices.delete(index='*')
    indices.delete_template('*')
    for _ in current_search.create():
        pass
    try:
        yield current_search_client
    finally:
        indices.delete(index='*')
        indices.delete_template('*')
コード例 #25
0
def elasticsearch_index_init(alembic, verbose):
    """Initialize the elasticsearch indices and indexing queue."""
    # Consume the lazy generators so the operations actually execute;
    # HTTP 400 (already exists) is tolerated.
    list(current_search.create(ignore=[400]))
    list(current_search.put_templates(ignore=[400]))
    # Declare the message queue used by the bulk indexer.
    queue = current_app.config['INDEXER_MQ_QUEUE']
    with establish_connection() as conn:
        queue(conn).declare()
コード例 #26
0
ファイル: common.py プロジェクト: emanueldima/b2share
def elasticsearch_index_init(alembic, verbose):
    """Initialize the elasticsearch indices and indexing queue."""
    # Drain the generators to run index/template creation; ignore 400
    # (already exists).
    list(current_search.create(ignore=[400]))
    list(current_search.put_templates(ignore=[400]))
    indexer_queue = current_app.config['INDEXER_MQ_QUEUE']
    with establish_connection() as connection:
        indexer_queue(connection).declare()
def es(base_app):
    """Provide elasticsearch access."""
    # Remove registered indices plus anything else left in the cluster.
    for _ in current_search.delete(ignore=[400, 404]):
        pass
    current_search_client.indices.delete(index='*')
    for _ in current_search.create():
        pass
    current_search_client.indices.refresh()
    try:
        yield current_search_client
    finally:
        current_search_client.indices.delete(index='*')
コード例 #28
0
def es(app):
    """Provide elasticsearch access."""
    client = current_search_client
    # Clean slate: drop registered indices, then any stragglers.
    for _ in current_search.delete(ignore=[400, 404]):
        pass
    client.indices.delete(index='*')
    for _ in current_search.create():
        pass
    client.indices.refresh()
    try:
        yield client
    finally:
        client.indices.delete(index='*')
コード例 #29
0
def es(app):
    """Provide elasticsearch access, create and clean indices."""
    # Wipe all indices and templates before and after the test.
    current_search_client.indices.delete(index='*')
    current_search_client.indices.delete_template('*')
    for _ in current_search.create():
        pass
    try:
        yield current_search_client
    finally:
        current_search_client.indices.delete(index='*')
        current_search_client.indices.delete_template('*')
コード例 #30
0
ファイル: conftest.py プロジェクト: EUDAT-B2SHARE/b2share
def app(request, clean_app):
    """Application with database tables created."""
    with clean_app.app_context():
        # Fail fast if invenio-db is not initialised on the app; the
        # extension object itself was never used (the old ``ext`` local
        # was dead code).
        assert 'invenio-db' in clean_app.extensions
        db.metadata.create_all(db.session.connection())
        alembic_stamp('heads')
        db.session.commit()
        for created in current_search.create(None):
            pass
        # Note that we do not create the Migration just to simplify things.

    return clean_app
コード例 #31
0
def app(request, clean_app):
    """Application with database tables created."""
    with clean_app.app_context():
        # Ensure invenio-db is set up; the extension object was bound to
        # an unused ``ext`` local before, which was dead code.
        assert 'invenio-db' in clean_app.extensions
        db.metadata.create_all(db.session.connection())
        alembic_stamp('heads')
        db.session.commit()
        for created in current_search.create(None):
            pass
        # Note that we do not create the Migration just to simplify things.

    return clean_app
コード例 #32
0
ファイル: conftest.py プロジェクト: llehtine/b2share
def app(request, tmpdir):
    """Flask application fixture."""
    from b2share.factory import create_api

    # Point the B2SHARE instance path at a per-test temporary directory
    # unless the environment already provides one.
    instance_path = tmpdir.mkdir('instance_dir').strpath
    os.environ.update(
        B2SHARE_INSTANCE_PATH=os.environ.get(
            'INSTANCE_PATH', instance_path),
    )
    app = create_api(
        TESTING=True,
        SERVER_NAME='localhost:5000',
        JSONSCHEMAS_HOST='localhost:5000',
        DEBUG_TB_ENABLED=False,
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite://'),
        LOGIN_DISABLED=False,
        WTF_CSRF_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT='CHANGEME',
        CELERY_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND="cache",
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        SUPPORT_EMAIL='*****@*****.**',
    )

    # update the application with the configuration provided by the test
    if hasattr(request, 'param') and 'config' in request.param:
        app.config.update(**request.param['config'])

    with app.app_context():
        # For a real (non in-memory sqlite) database, recreate it fully.
        if app.config['SQLALCHEMY_DATABASE_URI'] != 'sqlite://':
            try:
                drop_database(db.engine.url)
            except ProgrammingError:
                # Database did not exist yet; nothing to drop.
                pass
            create_database(db.engine.url)
        db.create_all()
        # Rebuild the search indices (consume the lazy generators).
        for deleted in current_search.delete(ignore=[404]):
            pass
        for created in current_search.create(None):
            pass

    def finalize():
        with app.app_context():
            db.drop_all()
            if app.config['SQLALCHEMY_DATABASE_URI'] != 'sqlite://':
                drop_database(db.engine.url)

    request.addfinalizer(finalize)
    return app
コード例 #33
0
def indexed_records(records):
    """Fixture for the records, which are already indexed."""
    current_search_client.indices.flush('*')
    # Delete every search index and recreate it from scratch.
    list(current_search.delete(ignore=[404]))
    list(current_search.create(None))
    # Index each record; keys (pid names) are not needed here.
    for record in records.values():
        RecordIndexer().index(record)
    # Flush so the freshly indexed records become searchable.
    current_search_client.indices.flush('*')
    return records
コード例 #34
0
ファイル: conftest.py プロジェクト: theleestarr/inspire-next
def clear_environment(app):
    """Reset database, indices, storage and permissions to a clean state."""
    with app.app_context():
        # Drop the schema and rebuild it through Alembic migrations.
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()

        Alembic(app=app).upgrade()
        for _ in current_search.delete(ignore=[404]):
            pass
        for _ in current_search.create(ignore=[400]):
            pass
        current_search.flush_and_refresh('records-hep')

        init_all_storage_paths()
        init_users_and_permissions()
コード例 #35
0
def es(app):
    """Provide elasticsearch access, create and clean indices.

    Don't create template so that the test or another fixture can modify the
    enabled events.
    """
    indices_api = current_search_client.indices
    indices_api.delete(index='*')
    indices_api.delete_template('*')
    for _ in current_search.create():
        pass
    try:
        yield current_search_client
    finally:
        indices_api.delete(index='*')
        indices_api.delete_template('*')
コード例 #36
0
ファイル: test_cli.py プロジェクト: hanadikaleel/inspirehep
def test_cli_create_aliases(inspire_app, cli, override_config):
    """`index create-aliases` creates one alias per prefixed record index."""
    prefix = "test-cli-create-aliases-prefix-"
    with override_config(SEARCH_INDEX_PREFIX=prefix):
        list(current_search.create(ignore_existing=True))
        result = cli.invoke(["index", "create-aliases", "--yes-i-know"])
    assert result.exit_code == 0
    assert (
        result.output !=
        "This command can be executed only if SEARCH_INDEX_PREFIX is set.\n")
    assert "does not contain current prefix" in result.output

    aliases = []
    for alias_info in current_search.client.indices.get_alias().values():
        aliases.extend(alias_info["aliases"].keys())

    record_types = (
        "hep",
        "authors",
        "seminars",
        "jobs",
        "institutions",
        "journals",
        "data",
        "experiments",
        "conferences",
    )
    # Every record type must have an unprefixed alias...
    for record_type in record_types:
        assert f"records-{record_type}" in aliases
    # ...and each alias must point at the prefixed index.
    for record_type in record_types:
        alias = f"records-{record_type}"
        _test_alias_to_index(alias, f"{prefix}{alias}")

    current_search.flush_and_refresh("*")
    current_search.client.indices.delete_alias(f"{prefix}*", "*")
コード例 #37
0
def test_remap_index_when_there_are_more_than_one_indexes_with_same_name_but_different_postfix(
        inspire_app, cli):
    """Remap must abort when several suffixed indexes share a base name."""
    # Create one records-data index under a random suffix, then switch to
    # a second random suffix so a remap finds an ambiguous match.
    current_search._current_suffix = f"-{random.getrandbits(64)}"
    list(current_search.create(ignore_existing=True,
                               index_list="records-data"))
    current_search._current_suffix = f"-{random.getrandbits(64)}"
    existing = set(current_search.client.indices.get("*").keys())
    result = cli.invoke(
        ["index", "remap", "--index", "records-data", "--yes-i-know"])
    current_search.flush_and_refresh("*")

    assert result.exit_code == 1
    # No new index may have been created by the failed remap.
    remaining = set(current_search.client.indices.get("*").keys())
    assert len(sorted(remaining - existing)) == 0

    list(current_search.delete("*"))
    current_search._current_suffix = None
コード例 #38
0
def purge(yes_i_know):
    """Removes every entry from DB and ES related to workflows"""
    query = "TRUNCATE {tables} RESTART IDENTITY"
    if not yes_i_know:
        # Destructive operation: require explicit confirmation.
        click.confirm(
            'This operation will irreversibly remove data from DB and ES, do you want to continue?',
            abort=True)
    table_listing = '\n* '.join(TABLES)
    click.secho('Truncating tables from DB:\n* {}'.format(table_listing))
    db.session.execute(query.format(tables=', '.join(TABLES)))
    db.session.commit()

    click.secho('Removing workflows indices from ES...')
    for _ in current_search.delete(index_list=ES_INDICES):
        pass

    click.secho('Recreating indices...')
    for _ in current_search.create(ignore_existing=True,
                                   index_list=ES_INDICES):
        pass

    click.secho('Purge completed')
コード例 #39
0
def test_cli_delete_indexes_prefixed_aliases(inspire_app, cli):
    """Deleting prefixed indexes removes every alias matching the prefix."""
    prefix = "test-cli-delete-aliases-prefix-"
    prefix_matcher = re.compile(f"""{prefix}.*""")
    with override_config(SEARCH_INDEX_PREFIX=prefix):
        list(current_search.create(ignore_existing=True))
        result = cli.invoke([
            "index",
            "delete-indexes",
            "--yes-i-know",
            "--prefix",
            "test-cli-delete-aliases-prefix-",
        ])
        current_search.flush_and_refresh("*")
    assert result.exit_code == 0

    # No alias matching the prefix may survive the deletion.
    surviving = [
        name for name in current_search.client.indices.get_alias().keys()
        if prefix_matcher.match(name)
    ]
    assert not surviving
    assert "No indices matching given prefix found." not in result.output
コード例 #40
0
ファイル: test_records_index.py プロジェクト: SarahBA/b2share
def test_record_indexing(app, test_users, test_records, script_info,
                           login_user):
    """Test record indexing and reindexing."""
    creator = test_users['deposits_creator']

    with app.app_context():
        # Make freshly indexed documents visible to searches.
        current_search_client.indices.flush('*')
    # Both records and deposits are expected to be indexed.
    subtest_record_search(app, creator, test_records, test_records, login_user)

    with app.app_context():
        current_search_client.indices.flush('*')
        # Drop every elasticsearch index and rebuild them empty.
        for _ in current_search.delete(ignore=[404]):
            pass
        for _ in current_search.create(None):
            pass
        current_search_client.indices.flush('*')

    # The rebuilt indices must contain nothing.
    subtest_record_search(app, creator, [], [], login_user)

    with app.app_context():
        runner = CliRunner()
        # Prepare the indexing queue.
        res = runner.invoke(cli.queue, ['init', 'purge'],
                            obj=script_info)
        assert 0 == res.exit_code
        # Schedule a full reindex...
        res = runner.invoke(cli.reindex, ['--yes-i-know'], obj=script_info)
        assert 0 == res.exit_code
        # ...and execute the queued bulk-index tasks eagerly.
        process_bulk_queue.delay()
        current_search_client.indices.flush('*')

    # After reindexing, everything should be searchable again.
    subtest_record_search(app, creator, test_records, test_records, login_user)
コード例 #41
0
def test_record_indexing(app, test_users, test_records, script_info,
                           login_user):
    """Test record indexing and reindexing."""
    creator = test_users['deposits_creator']

    with app.app_context():
        # Flush so what was indexed becomes visible to searches.
        current_search_client.indices.flush('*')
    # Records and deposits should both be searchable at this point.
    subtest_record_search(app, creator, test_records, test_records, login_user)

    with app.app_context():
        current_search_client.indices.flush('*')
        # Wipe and recreate all elasticsearch indices.
        list(current_search.delete(ignore=[404]))
        list(current_search.create(None))
        current_search_client.indices.flush('*')

    # Nothing should remain searchable after the rebuild.
    subtest_record_search(app, creator, [], [], login_user)

    with app.app_context():
        runner = CliRunner()
        # Initialize (and empty) the indexing queue.
        res = runner.invoke(cli.queue, ['init', 'purge'],
                            obj=script_info)
        assert 0 == res.exit_code
        # Schedule the reindex task, then run the queued work synchronously.
        res = runner.invoke(cli.reindex, ['--yes-i-know'], obj=script_info)
        assert 0 == res.exit_code
        process_bulk_queue.delay()
        # Flush the indices so that reindexed records are searchable.
        current_search_client.indices.flush('*')

    # The reindex must have restored records and deposits alike.
    subtest_record_search(app, creator, test_records, test_records, login_user)
コード例 #42
0
ファイル: cli.py プロジェクト: jacenkow/inspire-next
def clean_records():
    """Truncate all the records from various tables."""
    from sqlalchemy.engine import reflection
    from invenio_search import current_search

    click.secho('>>> Truncating all records.')

    # Core tables to wipe; foreign-key dependents are discovered below.
    tables_to_truncate = [
        "records_metadata",
        "records_metadata_version",
        "inspire_prod_records",
        "inspire_orcid_records",
        "pidstore_pid",
    ]
    db.session.begin(subtransactions=True)
    try:
        # Grab any table with foreign keys to records_metadata for truncating
        inspector = reflection.Inspector.from_engine(db.engine)
        for table_name in inspector.get_table_names():
            for foreign_key in inspector.get_foreign_keys(table_name):
                if foreign_key["referred_table"] == "records_metadata":
                    tables_to_truncate.append(table_name)

        # Final interactive safeguard before destroying data.
        if not click.confirm("Going to truncate:\n{0}".format(
                "\n".join(tables_to_truncate))):
            return

        click.secho('Truncating tables...', fg='red', bold=True,
                    err=True)
        with click.progressbar(tables_to_truncate) as tables:
            for table in tables:
                # CASCADE also clears rows in dependent tables.
                db.engine.execute("TRUNCATE TABLE {0} RESTART IDENTITY CASCADE".format(table))
                click.secho("\tTruncated {0}".format(table))

        db.session.commit()

        # BUGFIX: ``dict.iteritems()`` exists only on Python 2 and raises
        # AttributeError on Python 3; ``items()`` behaves the same on both.
        # Restrict the registry to records aliases so only record indices
        # are destroyed and recreated below.
        current_search.aliases = {
            k: v for k, v in current_search.aliases.items()
            if k.startswith('records')
        }
        click.secho('Destroying indexes...',
                    fg='red',
                    bold=True,
                    err=True)
        # delete()/create() are lazy generators; the progressbar drains them.
        with click.progressbar(
                current_search.delete(ignore=[400, 404])) as bar:
            for name, response in bar:
                click.secho(name)

        click.secho('Creating indexes...',
                    fg='green',
                    bold=True,
                    err=True)
        with click.progressbar(
                current_search.create(ignore=[400])) as bar:
            for name, response in bar:
                click.secho(name)

    except Exception as err:  # noqa
        # Roll back the DB transaction and log; intentionally best-effort.
        db.session.rollback()
        current_app.logger.exception(err)
コード例 #43
0
ファイル: conftest.py プロジェクト: duncanwp/zenodo
def es(app):
    """Provide elasticsearch access."""
    # create()/delete() are lazy generators; iterate to run the requests.
    for _ in current_search.create():
        pass
    yield current_search
    for _ in current_search.delete(ignore=[404]):
        pass