def handle(self, *args, **options):
    """
    Create the Elasticsearch index and alias for the default Haystack connection.

    Reads the host URL and index name from HAYSTACK_CONNECTIONS['default'],
    connects to the cluster, and delegates index/alias creation to
    ElasticsearchUtils.
    """
    default_connection = settings.HAYSTACK_CONNECTIONS['default']
    host = default_connection['URL']
    alias = default_connection['INDEX_NAME']

    logger.info('Attempting to establish initial connection to Elasticsearch host [%s]...', host)
    es = Elasticsearch(host)
    logger.info('...success!')

    ElasticsearchUtils.create_alias_and_index(es, alias)
def handle(self, *args, **options):
    """
    Bootstrap the Elasticsearch index/alias pair used by the default
    Haystack connection.
    """
    conn_settings = settings.HAYSTACK_CONNECTIONS['default']

    logger.info(
        'Attempting to establish initial connection to Elasticsearch host [%s]...',
        conn_settings['URL'])
    es = Elasticsearch(conn_settings['URL'])
    logger.info('...success!')

    ElasticsearchUtils.create_alias_and_index(es, conn_settings['INDEX_NAME'])
def handle(self, *args, **options):
    """
    For every backend configured in ELASTICSEARCH_DSL, verify connectivity
    and create an alias + index for each document index in the DSL registry.
    """
    for backend_name, host_config in settings.ELASTICSEARCH_DSL.items():
        logger.info(
            'Attempting to establish initial connection to Elasticsearch host [%s]...',
            host_config['hosts'])
        es_connection = get_connection(backend_name)
        # A successful ping confirms the cluster is reachable before we create anything.
        es_connection.ping()
        logger.info('...success!')

        for index in registry.get_indices():
            ElasticsearchUtils.create_alias_and_index(es_connection, index, backend_name)
def haystack_default_connection(haystack_add_xdist_suffix_to_index_name):  # pylint: disable=redefined-outer-name,unused-argument
    """
    Fixture yielding the default Haystack backend against a freshly
    (re)created Elasticsearch index; the index is removed again on teardown.
    """
    skip_if_no_django()

    backend = haystack_connections['default'].get_backend()
    # Force Haystack to update the mapping for the index
    backend.setup_complete = False

    es, index_name = backend.conn, backend.index_name

    # Start from a clean slate: drop any existing index, recreate it, and
    # refresh so the new index is immediately searchable.
    ElasticsearchUtils.delete_index(es, index_name)
    ElasticsearchUtils.create_alias_and_index(es, index_name)
    ElasticsearchUtils.refresh_index(es, index_name)

    yield backend

    # Teardown: remove the index created for this test run.
    ElasticsearchUtils.delete_index(es, index_name)
def test_handle(self):
    """ Verify the command removes all but the newest indexes. """
    conn = self.backend.conn
    alias = self.backend.index_name

    # Create initial index with alias
    ElasticsearchUtils.create_alias_and_index(es_connection=conn, alias=alias)

    # Anchor at "now" so every index created below timestamps AFTER the
    # current (aliased) index, keeping the expected counts accurate.
    start = datetime.datetime.now()

    # Create 2 more indexes than we expect to exist after removal
    for offset in range(1, settings.HAYSTACK_INDEX_RETENTION_LIMIT + 2):
        with freeze_time(start + datetime.timedelta(seconds=offset)):
            ElasticsearchUtils.create_index(es_connection=conn, prefix=alias)

    # Prune indexes and confirm the right indexes are removed
    call_command('remove_unused_indexes')
    indices_client = conn.indices
    indexes_to_keep = indices_client.get_alias(name=alias).keys()

    # The currently-aliased indexes must survive pruning
    all_indexes = self.get_current_index_names(
        indices_client=indices_client, index_prefix=alias)
    assert set(all_indexes).issuperset(set(indexes_to_keep))

    # Everything beyond the retention limit (other than the aliased indexes)
    # should have been removed
    expected_count = settings.HAYSTACK_INDEX_RETENTION_LIMIT + len(indexes_to_keep)
    assert len(all_indexes) == expected_count

    # Running the command a second time should be a no-op
    call_command('remove_unused_indexes')
    all_indexes = self.get_current_index_names(
        indices_client=indices_client, index_prefix=alias)
    assert set(all_indexes).issuperset(set(indexes_to_keep))
    assert len(all_indexes) == expected_count
def reset_index(self):
    """ Drop the Elasticsearch index and rebuild it (with its alias). """
    es, index = self.es, self.index
    self.delete_index(index)
    ElasticsearchUtils.create_alias_and_index(es, index)