def es(app):
    """Provide elasticsearch access."""
    # First attempt assumes a clean cluster; on a mapping conflict,
    # wipe everything (404 = already gone) and build the indices again.
    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[404]))
        list(current_search.create())
    yield current_search
    # Teardown: remove all indices created for the test.
    list(current_search.delete(ignore=[404]))
def es(app):
    """Elasticsearch fixture."""
    # Create all registered indices; if some already exist, drop them and
    # retry, ignoring 400 so concurrent creation does not abort the fixture.
    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    # Teardown: tolerate indices that are already gone.
    list(current_search.delete(ignore=[404]))
def es(app):
    """Provide elasticsearch access."""
    # On conflict, remove whatever exists (400/404 are harmless here)
    # and recreate every registered index from scratch.
    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[400, 404]))
        list(current_search.create())
    current_search_client.indices.refresh()
    yield current_search_client
    list(current_search.delete(ignore=[404]))
def es(app):
    """Provide elasticsearch access."""
    # Disable automatic indexing and wire up the search extension.
    app.config.update(SEARCH_AUTOINDEX=[])
    InvenioSearch(app)
    with app.app_context():
        # Build indices; on conflict wipe and rebuild.
        try:
            list(current_search.create())
        except RequestError:
            list(current_search.delete(ignore=[404]))
            list(current_search.create())
        yield current_search
        list(current_search.delete(ignore=[404]))
def prefixed_es(app):
    """Elasticsearch fixture."""
    # Force a prefix so test indices never collide with real ones.
    app.config['SEARCH_INDEX_PREFIX'] = 'test-'
    try:
        list(current_search.create())
    except (RequestError, IndexAlreadyExistsError):
        # Leftovers from a previous run: wipe and recreate.
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    # Teardown: drop indices and restore an empty prefix.
    list(current_search.delete(ignore=[404]))
    app.config['SEARCH_INDEX_PREFIX'] = ''
def es(app):
    """Provide elasticsearch access."""
    # Turn off autoindexing before initializing the search extension.
    app.config.update(SEARCH_AUTOINDEX=[])
    InvenioSearch(app)
    with app.app_context():
        try:
            list(current_search.create())
        except RequestError:
            # Stale indices from an earlier run: drop and recreate.
            list(current_search.delete(ignore=[404]))
            list(current_search.create())
        yield current_search
        list(current_search.delete(ignore=[404]))
def es(app):
    """Provide elasticsearch access."""
    InvenioSearch(app)
    # Create indices; on conflict wipe and retry, tolerating races (400).
    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    # Declare the indexer message queue so bulk-indexing tasks can run.
    indexer_queue = app.config['INDEXER_MQ_QUEUE']
    with establish_connection() as conn:
        indexer_queue(conn).declare()
    yield current_search_client
    list(current_search.delete(ignore=[404]))
def test_percolator(app, request):
    """Test percolator."""
    def teardown():
        # Detach collection signal handlers and drop all indices.
        with app.app_context():
            current_collections.unregister_signals()
            list(current_search.delete())

    request.addfinalizer(teardown)
    with app.test_request_context():
        app.config.update(
            COLLECTIONS_USE_PERCOLATOR=True,
            SEARCH_ELASTIC_KEYWORD_MAPPING={None: ['_all']},
        )
        search_ext = InvenioSearch(app)
        search_ext.register_mappings('records', 'data')
        InvenioIndexer(app)
        # Re-register signals so collection matching runs via the percolator.
        current_collections.unregister_signals()
        current_collections.register_signals()
        list(current_search.create())
        _try_populate_collections()
def simple_record(app):
    """Yield a minimal workflow payload, cleaning up holdingpen afterwards."""
    yield {
        'data': {
            '$schema': 'http://localhost:5000/schemas/records/hep.json',
            '_collections': ['Literature'],
            'document_type': ['article'],
            'titles': [{'title': 'Superconductivity'}],
            'acquisition_source': {'source': 'arXiv'},
            'dois': [{'value': '10.3847/2041-8213/aa9110'}],
        },
        'extra_data': {
            'source_data': {
                # Same record payload, kept as the pristine source copy.
                'data': {
                    '$schema': 'http://localhost:5000/schemas/records/hep.json',
                    '_collections': ['Literature'],
                    'document_type': ['article'],
                    'titles': [{'title': 'Superconductivity'}],
                    'acquisition_source': {'source': 'arXiv'},
                    'dois': [{'value': '10.3847/2041-8213/aa9110'}],
                },
                'extra_data': {},
            },
        },
    }
    # Teardown: rebuild the holdingpen index used by the workflow.
    list(current_search.delete(index_list='holdingpen-hep'))
    list(current_search.create(ignore=[400], ignore_existing=True))
def app():
    """Flask application.

    Creates a Flask application with a simple testing configuration, then
    creates an application context and yields, so that all tests have
    access to one. See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    flask_app = create_app(
        # DEBUG must stay off: tests may fail when turned on because of a
        # Flask bug ("A setup function was called after the first request
        # was handled") triggered during Alembic initialization.
        DEBUG=False,
        WTF_CSRF_ENABLED=False,
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND='cache',
        CELERY_CACHE_BACKEND='memory',
        CELERY_TASK_EAGER_PROPAGATES=True,
        TESTING=True,
        PRODUCTION_MODE=True,
        SQLALCHEMY_DATABASE_URI=(
            "postgresql+psycopg2://scoap3:dbpass123@localhost:5432/scoap3"),
    )
    with flask_app.app_context():
        # Start every test session from a pristine DB and search cluster.
        db.session.close()
        db.drop_all()
        db.create_all()
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
        _create_files_location()
        current_search.flush_and_refresh('*')
        yield flask_app
def app(request):
    """Flask application fixture."""
    flask_app = create_app(
        CELERY_ALWAYS_EAGER=True,
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        CELERY_RESULT_BACKEND="cache",
        DEBUG_TB_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT="CHANGE_ME",
        MAIL_SUPPRESS_SEND=True,
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db'),
        TESTING=True,
    )
    with flask_app.app_context():
        # Create the database (if missing) and rebuild schema + indices.
        if not database_exists(str(db.engine.url)):
            create_database(str(db.engine.url))
        db.drop_all()
        db.create_all()
        list(current_search.create())

    def teardown():
        # Drop both the database and the search indices after the test.
        with flask_app.app_context():
            drop_database(str(db.engine.url))
            list(current_search.delete(ignore=[404]))

    request.addfinalizer(teardown)
    return flask_app
def test_whitelisted_aliases(app, aliases_config, expected_aliases):
    """Test functionality of active aliases configuration variable."""
    saved_mappings = app.config['SEARCH_MAPPINGS']
    search = app.extensions['invenio-search']
    search.register_mappings('records', 'mock_module.mappings')
    search.register_mappings('authors', 'mock_module.mappings')
    app.config.update(SEARCH_MAPPINGS=aliases_config)
    with app.app_context():
        # Wipe every alias and index before creating the whitelisted set.
        current_search_client.indices.delete_alias('_all', '_all',
                                                   ignore=[400, 404])
        current_search_client.indices.delete('*')
        list(current_search.create(ignore=None))
        aliases = current_search_client.indices.get_alias()
        if expected_aliases == []:
            # An empty whitelist must produce no aliases at all.
            assert len(aliases) == 0
        else:
            assert current_search_client.indices.exists(expected_aliases)
    # Restore the original mapping configuration for later tests.
    app.config['SEARCH_MAPPINGS'] = saved_mappings
def app(request):
    """Flask application fixture."""
    flask_app = create_app(
        CELERY_ALWAYS_EAGER=True,
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        CELERY_RESULT_BACKEND="cache",
        DEBUG_TB_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT="CHANGE_ME",
        MAIL_SUPPRESS_SEND=True,
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db'),
        TESTING=True,
    )
    with flask_app.app_context():
        # Ensure the database exists, then rebuild schema and indices.
        if not database_exists(str(db.engine.url)):
            create_database(str(db.engine.url))
        db.drop_all()
        db.create_all()
        list(current_search.create())

    def teardown():
        # Remove database and search indices once the test is done.
        with flask_app.app_context():
            drop_database(str(db.engine.url))
            list(current_search.delete(ignore=[404]))

    request.addfinalizer(teardown)
    return flask_app
def remap_indexes(ctx, yes_i_know, indexes, ignore_checks):
    """Destructively delete and recreate (remap) the selected ES indexes.

    Exits with status 1 when no/unknown indexes are given or when fewer
    indexes were deleted than requested (unless ``ignore_checks`` is set).
    """
    if not yes_i_know:
        click.confirm(
            "This operation will irreversibly remove data from selected indexes in ES, do you want to continue?",
            abort=True,
        )
    if not indexes:
        click.echo("You should specify indexes which you want to remap")
        click.echo(
            f"Available indexes are: {', '.join(current_search.mappings.keys())}"
        )
        ctx.exit(1)
    wrong_indexes = list(set(indexes) - set(current_search.mappings.keys()))
    if not ignore_checks and len(wrong_indexes) > 0:
        click.echo(f"Indexes {', '.join(wrong_indexes)} not recognized.")
        click.echo(
            f"Available indexes are: {', '.join(current_search.mappings.keys())}"
        )
        ctx.exit(1)
    click.echo(f"Deleting indexes: {', '.join(indexes)}")
    deleted_indexes = list(current_search.delete(index_list=indexes))
    if not ignore_checks and len(deleted_indexes) != len(indexes):
        click.echo(
            # BUG FIX: both closing parentheses were missing from this
            # message, producing garbled CLI output.
            f"Number of deleted indexes ({len(deleted_indexes)}) is different than requested ones ({len(indexes)})",
            err=True,
        )
        click.echo("deleted indexes %s" % [i[0] for i in deleted_indexes])
        ctx.exit(1)
    # ignore_existing: a concurrent creation of the same index is not fatal.
    created_indexes = list(
        current_search.create(ignore_existing=True, index_list=indexes))
    click.echo("remapped indexes %s" % [i[0] for i in created_indexes])
def test_whitelisted_aliases(app, aliases_config, expected_aliases):
    """Test functionality of active aliases configuration variable."""
    # Map each whitelisted mapping name to every alias it should produce.
    all_aliases = dict(
        authors=['authors', 'authors-authors-v1.0.0'],
        records=[
            'records',
            'records-default-v1.0.0',
            'records-authorities',
            'records-authorities-authority-v1.0.0',
            'records-bibliographic',
            'records-bibliographic-bibliographic-v1.0.0',
        ],
    )
    saved_mappings = app.config['SEARCH_MAPPINGS']
    search = app.extensions['invenio-search']
    search.register_mappings('records', 'mock_module.mappings')
    search.register_mappings('authors', 'mock_module.mappings')
    app.config.update(SEARCH_MAPPINGS=aliases_config)
    # Start from a clean cluster, then create only whitelisted indices.
    current_search_client.indices.delete_alias('_all', '_all',
                                               ignore=[400, 404])
    current_search_client.indices.delete('*')
    list(current_search.create(ignore=None))
    aliases = current_search_client.indices.get_alias()
    if expected_aliases == []:
        assert len(aliases) == 0
    else:
        for expected_alias in expected_aliases:
            assert current_search_client.indices.exists(
                all_aliases[expected_alias])
    app.config['SEARCH_MAPPINGS'] = saved_mappings
def reindex_all_relationships(destroy: bool = False):
    """Reindex all relationship documents."""
    if destroy:
        # Rebuild indices from scratch; 400/404 mean "already exists/gone".
        list(current_search.delete(ignore=[400, 404]))
        list(current_search.create(ignore=[400, 404]))
    # Stream relationships from the DB in batches of 1000 and bulk-index.
    relationships = GroupRelationship.query.yield_per(1000)
    for batch in chunks(relationships, 1000):
        index_documents(map(build_doc, batch), bulk=True)
def es(app):
    """Elasticsearch fixture.

    Removes all indices and data first to get to a well-defined state,
    then creates all registered indices and yields the client.
    """
    for idx in current_search_client.indices.get('*'):
        try:
            current_search_client.indices.delete(idx)
        except Exception:
            # Best-effort cleanup: an index may vanish between listing and
            # deleting.  BUG FIX: the original bare ``except:`` also
            # swallowed KeyboardInterrupt/SystemExit; catch Exception only.
            pass
    try:
        list(current_search.create())
    except RequestError:
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
    current_search_client.indices.refresh()
    yield current_search_client
    list(current_search.delete(ignore=[404]))
def app():
    """
    Deprecated: do not use this fixtures for new tests, unless for very
    specific use cases. Use `isolated_app` instead.

    Flask application with demosite data and without any database isolation:
    any db transaction performed during the tests are persisted into the db.

    Creates a Flask application with a simple testing configuration, then
    creates an application context and inside of it recreates all databases
    and indices from the fixtures. Finally it yields, so that all tests that
    explicitly use the ``app`` fixtures have access to an application
    context.

    See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    app = create_app(
        # Tests may fail when turned on because of a Flask bug ("A setup
        # function was called after the first request was handled") hit
        # during Alembic initialization.
        DEBUG=False,
        WTF_CSRF_ENABLED=False,
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND='cache',
        CELERY_CACHE_BACKEND='memory',
        CELERY_TASK_EAGER_PROPAGATES=True,
        SECRET_KEY='secret!',
        RECORD_EDITOR_FILE_UPLOAD_FOLDER='tests/integration/editor/temp',
        TESTING=True,
    )
    app.extensions['invenio-search'].register_mappings(
        'records', 'inspirehep.modules.records.mappings')
    # Citation-indexing is patched out so migration does not enqueue
    # follow-up indexing tasks for every record.
    with app.app_context(), mock.patch(
        'inspirehep.modules.records.receivers.index_modified_citations_from_record.delay'
    ):
        # Celery task imports must be local, otherwise their
        # configuration would use the default pickle serializer.
        from inspirehep.modules.migrator.tasks import migrate_from_file
        # Rebuild the DB schema from Alembic migrations, not create_all().
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()
        alembic = Alembic(app=current_app)
        alembic.upgrade()
        # Recreate search indices and seed storage, users and tokens.
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
        init_all_storage_paths()
        init_users_and_permissions()
        init_authentication_token()
        migrate_from_file('./inspirehep/demosite/data/demo-records.xml.gz',
                          wait_for_results=True)
        current_search.flush_and_refresh(
            'records-hep')  # Makes sure that all HEP records were migrated.
        yield app
def test_creating_alias_existing_index(app, suffix, create_index,
                                       create_alias, expected):
    """Test creating new alias and index where there already exists one."""
    search = app.extensions['invenio-search']
    search.register_mappings('authors', 'mock_module.mappings')
    # Pin the suffix so created index names are predictable in assertions.
    search._current_suffix = suffix
    # Start from a clean cluster.
    current_search_client.indices.delete_alias('_all', '_all',
                                               ignore=[400, 404])
    current_search_client.indices.delete('*')
    new_indexes = []
    if create_index:
        # Pre-existing plain index that may clash with the alias name.
        current_search_client.indices.create(index=create_index)
        new_indexes.append(create_index)
    if create_alias:
        # Pre-existing suffixed index with a (write) alias pointing at it.
        write_alias_index = '{}-suffix'.format(create_alias)
        current_search_client.indices.create(index=write_alias_index)
        new_indexes.append(write_alias_index)
        current_search_client.indices.put_alias(
            index=write_alias_index,
            name=create_alias,
        )
    if expected:
        # Creation should succeed and yield exactly the expected names.
        results = list(current_search.create(ignore=None))
        assert len(results) == len(expected)
        for result in results:
            assert result[0] in expected
        indices = current_search_client.indices.get('*')
        index_names = list(indices.keys())
        alias_names = []
        for index in index_names:
            alias_names.extend(list(indices[index]['aliases'].keys()))
        for index, _ in results:
            if index.endswith(suffix):
                # Suffixed names are real indices; everything else created
                # must have been an alias.
                assert sorted(index_names) == sorted([index] + new_indexes)
            else:
                assert index in alias_names
    else:
        # Creation must fail and leave the pre-existing indices untouched.
        with pytest.raises(Exception):
            results = list(current_search.create(ignore=None))
        indices = current_search_client.indices.get('*')
        index_names = list(indices.keys())
        assert index_names == new_indexes
        if create_index:
            assert len(indices[create_index]['aliases']) == 0
def clean_records():
    """Truncate all the records from various tables.

    Also destroys and recreates the 'records' search indices.
    Everything runs in a subtransaction that is rolled back on error.
    """
    from sqlalchemy.engine import reflection
    from invenio_search import current_search
    click.secho('>>> Truncating all records.')
    tables_to_truncate = [
        "records_metadata",
        "records_metadata_version",
        "inspire_prod_records",
        "inspire_orcid_records",
        "pidstore_pid",
    ]
    db.session.begin(subtransactions=True)
    try:
        # Grab any table with foreign keys to records_metadata for truncating
        inspector = reflection.Inspector.from_engine(db.engine)
        for table_name in inspector.get_table_names():
            for foreign_key in inspector.get_foreign_keys(table_name):
                if foreign_key["referred_table"] == "records_metadata":
                    tables_to_truncate.append(table_name)
        if not click.confirm("Going to truncate:\n{0}".format(
                "\n".join(tables_to_truncate))):
            return
        click.secho('Truncating tables...', fg='red', bold=True, err=True)
        with click.progressbar(tables_to_truncate) as tables:
            for table in tables:
                db.engine.execute(
                    "TRUNCATE TABLE {0} RESTART IDENTITY CASCADE".format(
                        table))
                click.secho("\tTruncated {0}".format(table))
        db.session.commit()
        # Only touch the record indices, not e.g. holdingpen ones.
        # BUG FIX: dict.iteritems() is Python-2-only; items() works on
        # both Python 2 and 3.
        current_search.aliases = {
            k: v
            for k, v in current_search.aliases.items()
            if k.startswith('records')
        }
        click.secho('Destroying indexes...', fg='red', bold=True, err=True)
        with click.progressbar(
                current_search.delete(ignore=[400, 404])) as bar:
            for name, response in bar:
                click.secho(name)
        click.secho('Creating indexes...', fg='green', bold=True, err=True)
        with click.progressbar(current_search.create(ignore=[400])) as bar:
            for name, response in bar:
                click.secho(name)
    except Exception as err:  # noqa
        db.session.rollback()
        current_app.logger.exception(err)
def es(app):
    """Provide elasticsearch access, create and clean indices."""
    # Wipe every index and template so the test starts from nothing.
    current_search_client.indices.delete(index='*')
    current_search_client.indices.delete_template('*')
    list(current_search.create())
    try:
        yield current_search_client
    finally:
        # Always clean up, even if the test raised.
        current_search_client.indices.delete(index='*')
        current_search_client.indices.delete_template('*')
def elasticsearch_index_init(alembic, verbose):
    """Initialize the elasticsearch indices and indexing queue."""
    # Exhaust the generators; 400 means "already exists" and is fine.
    list(current_search.create(ignore=[400]))
    list(current_search.put_templates(ignore=[400]))
    # Declare the message queue used by the bulk indexer.
    indexer_queue = current_app.config['INDEXER_MQ_QUEUE']
    with establish_connection() as conn:
        indexer_queue(conn).declare()
def es(base_app):
    """Provide elasticsearch access."""
    # Remove registered indices plus any strays, then rebuild.
    list(current_search.delete(ignore=[400, 404]))
    current_search_client.indices.delete(index='*')
    list(current_search.create())
    current_search_client.indices.refresh()
    try:
        yield current_search_client
    finally:
        # Guaranteed cleanup regardless of test outcome.
        current_search_client.indices.delete(index='*')
def es(app):
    """Provide elasticsearch access."""
    # Drop registered indices and anything left behind, then recreate.
    list(current_search.delete(ignore=[400, 404]))
    current_search_client.indices.delete(index='*')
    list(current_search.create())
    current_search_client.indices.refresh()
    try:
        yield current_search_client
    finally:
        # Cleanup runs even when the test fails.
        current_search_client.indices.delete(index='*')
def app(request, clean_app):
    """Application with database tables created.

    Creates all DB tables, stamps Alembic at head and builds the search
    indices inside the clean application's context.
    """
    with clean_app.app_context():
        # Removed an unused ``ext = clean_app.extensions['invenio-db']``
        # local: the lookup result was never used.
        db.metadata.create_all(db.session.connection())
        alembic_stamp('heads')
        db.session.commit()
        # Exhaust the generator so every index is actually created.
        for created in current_search.create(None):
            pass
        # Note that we do not create the Migration just to simplify things.
    return clean_app
def app(request, tmpdir):
    """Flask application fixture.

    Builds the B2Share API app in a temporary instance directory, recreates
    the database and search indices, and registers a finalizer that drops
    both again.  ``request.param['config']`` may override any config key.
    """
    from b2share.factory import create_api
    instance_path = tmpdir.mkdir('instance_dir').strpath
    # INSTANCE_PATH from the environment wins over the tmpdir default.
    os.environ.update(
        B2SHARE_INSTANCE_PATH=os.environ.get(
            'INSTANCE_PATH', instance_path),
    )
    app = create_api(
        TESTING=True,
        SERVER_NAME='localhost:5000',
        JSONSCHEMAS_HOST='localhost:5000',
        DEBUG_TB_ENABLED=False,
        SQLALCHEMY_DATABASE_URI=os.environ.get(
            'SQLALCHEMY_DATABASE_URI', 'sqlite://'),
        LOGIN_DISABLED=False,
        WTF_CSRF_ENABLED=False,
        SECRET_KEY="CHANGE_ME",
        SECURITY_PASSWORD_SALT='CHANGEME',
        CELERY_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND="cache",
        CELERY_CACHE_BACKEND="memory",
        CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
        SUPPORT_EMAIL='*****@*****.**',
    )
    # update the application with the configuration provided by the test
    if hasattr(request, 'param') and 'config' in request.param:
        app.config.update(**request.param['config'])
    with app.app_context():
        # For a real (non in-memory) database, recreate it from scratch;
        # dropping may fail if it does not exist yet, hence the guard.
        if app.config['SQLALCHEMY_DATABASE_URI'] != 'sqlite://':
            try:
                drop_database(db.engine.url)
            except ProgrammingError:
                pass
            create_database(db.engine.url)
        db.create_all()
        # Rebuild the search indices (404 = index was already absent).
        for deleted in current_search.delete(ignore=[404]):
            pass
        for created in current_search.create(None):
            pass

    def finalize():
        # Tear down tables and, for real databases, the database itself.
        with app.app_context():
            db.drop_all()
            if app.config['SQLALCHEMY_DATABASE_URI'] != 'sqlite://':
                drop_database(db.engine.url)

    request.addfinalizer(finalize)
    return app
def indexed_records(records):
    """Fixture for the records, which are already indexed."""
    current_search_client.indices.flush('*')
    # Delete all elasticsearch indices and recreate them from scratch.
    list(current_search.delete(ignore=[404]))
    list(current_search.create(None))
    # Index each record, then flush so they are immediately searchable.
    for record in records.values():
        RecordIndexer().index(record)
    current_search_client.indices.flush('*')
    return records
def clear_environment(app):
    """Reset database, search indices and storage to a pristine state."""
    with app.app_context():
        # Rebuild the schema via Alembic migrations, not create_all().
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()
        Alembic(app=app).upgrade()
        # Recreate the search indices and make HEP records searchable.
        list(current_search.delete(ignore=[404]))
        list(current_search.create(ignore=[400]))
        current_search.flush_and_refresh('records-hep')
        init_all_storage_paths()
        init_users_and_permissions()
def es(app):
    """Provide elasticsearch access, create and clean indices.

    Don't create template so that the test or another fixture can modify
    the enabled events.
    """
    # Begin with an empty cluster: no indices, no templates.
    current_search_client.indices.delete(index='*')
    current_search_client.indices.delete_template('*')
    list(current_search.create())
    try:
        yield current_search_client
    finally:
        # Cleanup always runs, even on test failure.
        current_search_client.indices.delete(index='*')
        current_search_client.indices.delete_template('*')
def test_cli_create_aliases(inspire_app, cli, override_config):
    """Check that `index create-aliases` adds unprefixed aliases for every
    prefixed record index."""
    prefix = "test-cli-create-aliases-prefix-"
    with override_config(SEARCH_INDEX_PREFIX=prefix):
        # Create the prefixed indices the command will alias.
        list(current_search.create(ignore_existing=True))
        result = cli.invoke(["index", "create-aliases", "--yes-i-know"])
        assert result.exit_code == 0
        # The command must not have bailed out on a missing prefix.
        assert (
            result.output !=
            "This command can be executed only if SEARCH_INDEX_PREFIX is set.\n")
        assert "does not contain current prefix" in result.output
        # Collect every alias name across all indices.
        aliases = []
        for x in current_search.client.indices.get_alias().values():
            aliases.extend(x["aliases"].keys())
        assert "records-hep" in aliases
        assert "records-authors" in aliases
        assert "records-seminars" in aliases
        assert "records-jobs" in aliases
        assert "records-institutions" in aliases
        assert "records-journals" in aliases
        assert "records-data" in aliases
        assert "records-experiments" in aliases
        assert "records-conferences" in aliases
        # Each alias must point at its prefixed counterpart index.
        _test_alias_to_index("records-hep",
                             "test-cli-create-aliases-prefix-records-hep")
        _test_alias_to_index("records-authors",
                             "test-cli-create-aliases-prefix-records-authors")
        _test_alias_to_index("records-seminars",
                             "test-cli-create-aliases-prefix-records-seminars")
        _test_alias_to_index("records-jobs",
                             "test-cli-create-aliases-prefix-records-jobs")
        _test_alias_to_index(
            "records-institutions",
            "test-cli-create-aliases-prefix-records-institutions")
        _test_alias_to_index("records-journals",
                             "test-cli-create-aliases-prefix-records-journals")
        _test_alias_to_index("records-data",
                             "test-cli-create-aliases-prefix-records-data")
        _test_alias_to_index(
            "records-experiments",
            "test-cli-create-aliases-prefix-records-experiments")
        _test_alias_to_index(
            "records-conferences",
            "test-cli-create-aliases-prefix-records-conferences")
        # Cleanup: drop the aliases created under the test prefix.
        current_search.flush_and_refresh("*")
        current_search.client.indices.delete_alias(f"{prefix}*", "*")
def test_remap_index_when_there_are_more_than_one_indexes_with_same_name_but_different_postfix(
        inspire_app, cli):
    """Remap must refuse to act when the index name is ambiguous."""
    # Create 'records-data' under one random suffix, then switch to another
    # so two physical indices share the same logical name.
    current_search._current_suffix = f"-{random.getrandbits(64)}"
    list(current_search.create(ignore_existing=True, index_list="records-data"))
    current_search._current_suffix = f"-{random.getrandbits(64)}"
    indexes_before = set(current_search.client.indices.get("*").keys())
    result = cli.invoke(
        ["index", "remap", "--index", "records-data", "--yes-i-know"])
    current_search.flush_and_refresh("*")
    # The command must fail and leave the set of indices untouched.
    assert result.exit_code == 1
    indexes_after = set(current_search.client.indices.get("*").keys())
    created = sorted(indexes_after - indexes_before)
    assert len(created) == 0
    # Cleanup: drop everything and reset the suffix override.
    list(current_search.delete("*"))
    current_search._current_suffix = None
def purge(yes_i_know):
    """Removes every entry from DB and ES related to workflows"""
    if not yes_i_know:
        click.confirm(
            'This operation will irreversibly remove data from DB and ES, do you want to continue?',
            abort=True)
    click.secho('Truncating tables from DB:\n* {}'.format('\n* '.join(TABLES)))
    # Truncate all workflow tables in one statement and reset sequences.
    db.session.execute(
        "TRUNCATE {tables} RESTART IDENTITY".format(tables=', '.join(TABLES)))
    db.session.commit()
    click.secho('Removing workflows indices from ES...')
    list(current_search.delete(index_list=ES_INDICES))
    click.secho('Recreating indices...')
    list(current_search.create(ignore_existing=True, index_list=ES_INDICES))
    click.secho('Purge completed')
def test_cli_delete_indexes_prefixed_aliases(inspire_app, cli):
    """`index delete-indexes --prefix` must remove all matching aliases."""
    prefix = "test-cli-delete-aliases-prefix-"
    prefix_regex = re.compile(f"{prefix}.*")
    with override_config(SEARCH_INDEX_PREFIX=prefix):
        # Create prefixed indices, then delete them through the CLI.
        list(current_search.create(ignore_existing=True))
        result = cli.invoke([
            "index",
            "delete-indexes",
            "--yes-i-know",
            "--prefix",
            "test-cli-delete-aliases-prefix-",
        ])
        current_search.flush_and_refresh("*")
        assert result.exit_code == 0
        # No alias matching the prefix may remain.
        remaining = [
            alias
            for alias in current_search.client.indices.get_alias().keys()
            if prefix_regex.match(alias)
        ]
        assert not remaining
        assert "No indices matching given prefix found." not in result.output
def test_record_indexing(app, test_users, test_records, script_info,
                         login_user):
    """Test record indexing and reindexing."""
    creator = test_users['deposits_creator']
    with app.app_context():
        # flush the indices so that indexed records are searchable
        current_search_client.indices.flush('*')
    # records and deposits should be indexed
    subtest_record_search(app, creator, test_records, test_records,
                          login_user)
    with app.app_context():
        current_search_client.indices.flush('*')
        # delete all elasticsearch indices and recreate them
        for deleted in current_search.delete(ignore=[404]):
            pass
        for created in current_search.create(None):
            pass
        # flush the indices so that indexed records are searchable
        current_search_client.indices.flush('*')
    # all records should have been deleted
    subtest_record_search(app, creator, [], [], login_user)
    with app.app_context():
        runner = CliRunner()
        # Initialize queue
        res = runner.invoke(cli.queue, ['init', 'purge'], obj=script_info)
        assert 0 == res.exit_code
        # schedule a reindex task
        res = runner.invoke(cli.reindex, ['--yes-i-know'], obj=script_info)
        assert 0 == res.exit_code
        # execute scheduled tasks synchronously
        process_bulk_queue.delay()
        # flush the indices so that indexed records are searchable
        current_search_client.indices.flush('*')
    # records and deposits should be indexed again
    subtest_record_search(app, creator, test_records, test_records,
                          login_user)
def clean_records():
    """Truncate all the records from various tables.

    Also destroys and recreates the 'records' search indices.
    Everything runs in a subtransaction that is rolled back on error.
    """
    from sqlalchemy.engine import reflection
    from invenio_search import current_search
    click.secho('>>> Truncating all records.')
    tables_to_truncate = [
        "records_metadata",
        "records_metadata_version",
        "inspire_prod_records",
        "inspire_orcid_records",
        "pidstore_pid",
    ]
    db.session.begin(subtransactions=True)
    try:
        # Grab any table with foreign keys to records_metadata for truncating
        inspector = reflection.Inspector.from_engine(db.engine)
        for table_name in inspector.get_table_names():
            for foreign_key in inspector.get_foreign_keys(table_name):
                if foreign_key["referred_table"] == "records_metadata":
                    tables_to_truncate.append(table_name)
        if not click.confirm("Going to truncate:\n{0}".format(
                "\n".join(tables_to_truncate))):
            return
        click.secho('Truncating tables...', fg='red', bold=True, err=True)
        with click.progressbar(tables_to_truncate) as tables:
            for table in tables:
                db.engine.execute(
                    "TRUNCATE TABLE {0} RESTART IDENTITY CASCADE".format(
                        table))
                click.secho("\tTruncated {0}".format(table))
        db.session.commit()
        # Only touch the record indices, not e.g. holdingpen ones.
        # BUG FIX: dict.iteritems() is Python-2-only; items() works on
        # both Python 2 and 3.
        current_search.aliases = {
            k: v
            for k, v in current_search.aliases.items()
            if k.startswith('records')
        }
        click.secho('Destroying indexes...', fg='red', bold=True, err=True)
        with click.progressbar(
                current_search.delete(ignore=[400, 404])) as bar:
            for name, response in bar:
                click.secho(name)
        click.secho('Creating indexes...', fg='green', bold=True, err=True)
        with click.progressbar(current_search.create(ignore=[400])) as bar:
            for name, response in bar:
                click.secho(name)
    except Exception as err:  # noqa
        db.session.rollback()
        current_app.logger.exception(err)
def es(app):
    """Provide elasticsearch access."""
    # Materialize the generator so every index is created up front.
    list(current_search.create())
    yield current_search
    # Teardown: drop indices, ignoring ones that no longer exist.
    list(current_search.delete(ignore=[404]))