def remove_program_enrolled_user(program_enrollment_id):
    """
    Remove a program-enrolled user from Elasticsearch.

    Args:
        program_enrollment_id (int): A program enrollment id which is the same as
            the document id to remove
    """
    # The enrollment document lives in both the public and the private
    # enrollment indices; delete it from every alias of each index type.
    for index_type in (PUBLIC_ENROLLMENT_INDEX_TYPE, PRIVATE_ENROLLMENT_INDEX_TYPE):
        for alias in get_aliases(index_type):
            _delete_item(program_enrollment_id, index=alias)
def delete_indices():
    """
    Drop all the indices. Used in testing.
    """
    # NOTE(review): a second ``delete_indices`` definition appears later in this
    # file and shadows this one — confirm which implementation is intended.
    conn = get_conn(verify=False)
    for index_type in ALL_INDEX_TYPES:
        for alias in get_aliases(index_type):
            # Only attempt removal if the alias currently resolves; then detach
            # it from every backing index matching the wildcard.
            if conn.indices.exists(alias):
                conn.indices.delete_alias(index=INDEX_WILDCARD, name=alias)
def delete_indices():
    """
    Drop all the indices. Used in testing.
    """
    # NOTE(review): this redefines ``delete_indices`` from earlier in the file
    # (which removes aliases instead of deleting indices) — confirm the
    # duplication is intended.
    conn = get_conn(verify=False)
    for index_type in ALL_INDEX_TYPES:
        for alias_name in get_aliases(index_type):
            if conn.indices.exists(alias_name):
                conn.indices.delete(alias_name)
def delete_percolate_query(percolate_query_id):
    """
    Remove a percolate query from Elasticsearch

    Args:
        percolate_query_id (int): The id of a deleted PercolateQuery
    """
    # Delete the query document from every alias backing the percolate index.
    for alias in get_aliases(PERCOLATE_INDEX_TYPE):
        _delete_item(percolate_query_id, index=alias)
def index_program_enrolled_users(
        program_enrollments, *, public_indices=None, private_indices=None, chunk_size=100):
    """
    Bulk index an iterable of ProgramEnrollments

    Args:
        program_enrollments (iterable of ProgramEnrollment): An iterable of program enrollments
        public_indices (list of str): The indices to store public enrollment documents
        private_indices (list of str): The indices to store private enrollment documents
        chunk_size (int): The number of items per chunk to index
    """
    # NOTE(review): an identical definition of this function appears again later
    # in this file — confirm the duplication is intended.
    public_indices = (
        get_aliases(PUBLIC_ENROLLMENT_INDEX_TYPE) if public_indices is None else public_indices
    )
    private_indices = (
        get_aliases(PRIVATE_ENROLLMENT_INDEX_TYPE) if private_indices is None else private_indices
    )

    # Serialize to a temporary file so we don't serialize twice (serializing is expensive)
    with open_json_stream() as json_stream:
        json_stream.write_stream(_get_private_documents(program_enrollments))

        # Public documents are derived from the already-serialized private ones,
        # so each pass re-reads the stream instead of re-serializing.
        for public_index in public_indices:
            _index_chunks(
                _get_public_documents(json_stream.read_stream()),
                index=public_index,
                chunk_size=chunk_size,
            )
        for private_index in private_indices:
            _index_chunks(
                json_stream.read_stream(),
                index=private_index,
                chunk_size=chunk_size,
            )
def index_program_enrolled_users(
        program_enrollments, *, public_indices=None, private_indices=None, chunk_size=100):
    """
    Bulk index an iterable of ProgramEnrollments

    Args:
        program_enrollments (iterable of ProgramEnrollment): An iterable of program enrollments
        public_indices (list of str): The indices to store public enrollment documents
        private_indices (list of str): The indices to store private enrollment documents
        chunk_size (int): The number of items per chunk to index
    """
    # NOTE(review): this duplicates an earlier, identical definition in this
    # file — confirm which copy should remain.
    if public_indices is None:
        public_indices = get_aliases(PUBLIC_ENROLLMENT_INDEX_TYPE)
    if private_indices is None:
        private_indices = get_aliases(PRIVATE_ENROLLMENT_INDEX_TYPE)

    # Serialize to a temporary file so we don't serialize twice (serializing is expensive)
    with open_json_stream() as json_stream:
        json_stream.write_stream(_get_private_documents(program_enrollments))

        for target in public_indices:
            # Public documents are built from the serialized private stream.
            public_documents = _get_public_documents(json_stream.read_stream())
            _index_chunks(public_documents, index=target, chunk_size=chunk_size)

        for target in private_indices:
            _index_chunks(json_stream.read_stream(), index=target, chunk_size=chunk_size)
def test_get_aliases(self, is_reindex, index_type, expected_indices):
    """
    We should choose the correct alias and doc type given the circumstances
    """
    conn = get_conn(verify=False)
    default_alias = make_alias_name(index_type, is_reindexing=False)
    backing_index = make_backing_index_name()
    # Skip the mapping because it's invalid for 2.x schema, and we don't need it here
    clear_and_create_index(backing_index, index_type=index_type, skip_mapping=True)
    conn.indices.put_alias(index=backing_index, name=default_alias)
    if is_reindex:
        # During a reindex a second, reindexing alias also points at the backing index.
        reindexing_alias = make_alias_name(index_type, is_reindexing=True)
        conn.indices.put_alias(index=backing_index, name=reindexing_alias)

    aliases = get_aliases(index_type)
    assert aliases == list(expected_indices)
    assert get_default_alias(index_type) == aliases[0]
def index_percolate_queries(percolate_queries, chunk_size=100):
    """
    Index percolate queries

    Args:
        percolate_queries (iterable of PercolateQuery):
            An iterable of PercolateQuery
        chunk_size (int): Number of queries to index per chunk

    Returns:
        int: Number of indexed items
    """
    aliases = get_aliases(PERCOLATE_INDEX_TYPE)
    # BUGFIX: materialize the serialized documents once. The previous code
    # built a single generator outside the alias loop, so the first alias
    # exhausted it and any later alias (two exist while reindexing) indexed
    # nothing, returning a count of 0.
    documents = [_serialize_percolate_query(query) for query in percolate_queries]
    count = 0
    for index in aliases:
        count = _index_chunks(
            documents,
            index=index,
            chunk_size=chunk_size,
        )
    # All counts should be the same
    return count