def handle_delete(request):
    """Deletes an index"""
    candidate = request.POST['delete_index']

    # Guard 1: only indexes living under our prefix may be touched.
    if not candidate.startswith(settings.ES_INDEX_PREFIX):
        raise DeleteError('"%s" is not a valid index name.' % candidate)

    # Guard 2: the index has to actually exist right now.
    existing = set(name for name, _count in get_indexes())
    if candidate not in existing:
        raise DeleteError('"%s" does not exist.' % candidate)

    # Guard 3: never drop the index we are currently reading from.
    if candidate == read_index():
        raise DeleteError('"%s" is the read index.' % candidate)

    delete_index(candidate)

    return HttpResponseRedirect(request.path)
def handle_delete(request):
    """Delete the POSTed Elasticsearch index after validating it.

    Validation rules:

    1. the name must start with ``settings.ES_INDEX_PREFIX``
    2. the index must currently exist
    3. the index must not be the default read index

    :arg request: the HttpRequest; ``request.POST['delete_index']``
        names the index to delete
    :returns: redirect back to the current path
    :raises DeleteError: if any validation rule fails
    """
    # FIX: default to '' so a missing POST field fails rule 1 with a
    # DeleteError instead of raising AttributeError on None.startswith.
    index_to_delete = request.POST.get('delete_index', '')
    es_indexes = [name for (name, count) in get_indexes()]

    # Rule 1: Has to start with the ES_INDEX_PREFIX.
    if not index_to_delete.startswith(settings.ES_INDEX_PREFIX):
        raise DeleteError('"%s" is not a valid index name.' % index_to_delete)

    # Rule 2: Must be an existing index.
    if index_to_delete not in es_indexes:
        raise DeleteError('"%s" does not exist.' % index_to_delete)

    # Rule 3: Don't delete the default read index.
    # TODO: When the critical index exists, this should be "Don't
    # delete the critical read index."
    if index_to_delete == read_index('default'):
        raise DeleteError('"%s" is the default read index.' % index_to_delete)

    # The index is ok to delete
    delete_index(index_to_delete)

    return HttpResponseRedirect(request.path)
es_deets = requests.get(settings.ES_URLS[0]).json() except requests.exceptions.RequestException: pass try: stats = get_doctype_stats(read_index()) except ES_EXCEPTIONS: pass try: write_stats = get_doctype_stats(write_index()) except ES_EXCEPTIONS: pass try: indexes = get_indexes() indexes.sort(key=lambda m: m[0]) except ES_EXCEPTIONS as e: error_messages.append('Error: {0}'.format(repr(e))) try: client = redis_client('default') outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS)) except (RedisError, TypeError): pass recent_records = Record.uncached.order_by('-starttime')[:20] return render( request, 'admin/search_maintenance.html',
def search(request):
    """Render the admin view containing search tools.

    Dispatches the POSTed maintenance actions (reset, reindex,
    recreate_index, delete_index) first — each returns a redirect on
    success — then gathers ES cluster details, per-index doctype stats,
    outstanding chunk counts and recent indexing records for display.

    :arg request: the HttpRequest
    :returns: rendered ``admin/search_maintenance.html`` response, or
        the redirect returned by one of the action handlers
    :raises PermissionDenied: if the user lacks ``search.reindex``
    """
    if not request.user.has_perm('search.reindex'):
        raise PermissionDenied

    error_messages = []

    if 'reset' in request.POST:
        try:
            return handle_reset(request)
        except ReindexError as e:
            # FIX: format the exception itself; the .message attribute
            # is deprecated (removed in Python 3).
            error_messages.append(u'Error: %s' % e)

    if 'reindex' in request.POST:
        try:
            return handle_reindex(request)
        except ReindexError as e:
            error_messages.append(u'Error: %s' % e)

    if 'recreate_index' in request.POST:
        try:
            return handle_recreate_index(request)
        except ReindexError as e:
            error_messages.append(u'Error: %s' % e)

    if 'delete_index' in request.POST:
        try:
            return handle_delete(request)
        except DeleteError as e:
            error_messages.append(u'Error: %s' % e)
        except ES_EXCEPTIONS as e:
            error_messages.append('Error: {0}'.format(repr(e)))

    # Defaults in case the lookups below fail partway through.
    es_deets = None
    indexes = []
    outstanding_chunks = None

    try:
        # TODO: SUMO has a single ES_URL and that's the ZLB and does
        # the balancing. If that ever changes and we have multiple
        # ES_URLs, then this should get fixed.
        es_deets = requests.get(settings.ES_URLS[0]).json()
    except requests.exceptions.RequestException:
        # Best-effort: the page still renders without cluster details.
        pass

    # Per-index doctype stats; None marks an index we couldn't reach.
    stats = {}
    for index in all_read_indexes():
        try:
            stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            stats[index] = None

    write_stats = {}
    for index in all_write_indexes():
        try:
            write_stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            write_stats[index] = None

    try:
        indexes = get_indexes()
        indexes.sort(key=lambda m: m[0])
    except ES_EXCEPTIONS as e:
        error_messages.append('Error: {0}'.format(repr(e)))

    try:
        client = redis_client('default')
        outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS))
    except (RedisError, TypeError):
        # TypeError covers int(None) when the key is absent.
        pass

    recent_records = Record.uncached.order_by('-starttime')[:100]
    outstanding_records = (Record.uncached.filter(endtime__isnull=True)
                           .order_by('-starttime'))

    # Every index group mentioned on either the read or write side.
    index_groups = set(settings.ES_INDEXES.keys())
    index_groups |= set(settings.ES_WRITE_INDEXES.keys())

    index_group_data = [[group, read_index(group), write_index(group)]
                        for group in index_groups]

    return render(
        request,
        'admin/search_maintenance.html',
        {'title': 'Search',
         'es_deets': es_deets,
         'doctype_stats': stats,
         'doctype_write_stats': write_stats,
         'indexes': indexes,
         'index_groups': index_groups,
         'index_group_data': index_group_data,
         'read_indexes': all_read_indexes,
         'write_indexes': all_write_indexes,
         'error_messages': error_messages,
         'recent_records': recent_records,
         'outstanding_records': outstanding_records,
         'outstanding_chunks': outstanding_chunks,
         'now': datetime.now(),
         'read_index': read_index,
         'write_index': write_index,
         })
def search(request):
    """Render the admin view containing search tools.

    Dispatches the POSTed maintenance actions (reset, reindex,
    recreate_index, delete_index) first — each returns a redirect on
    success — then gathers ES cluster details, per-index doctype stats
    and recent indexing records for display.

    :arg request: the HttpRequest
    :returns: rendered ``admin/search_maintenance.html`` response, or
        the redirect returned by one of the action handlers
    :raises PermissionDenied: if the user lacks ``search.reindex``
    """
    if not request.user.has_perm('search.reindex'):
        raise PermissionDenied

    error_messages = []

    if 'reset' in request.POST:
        try:
            return handle_reset(request)
        except ReindexError as e:
            # FIX: format the exception itself; the .message attribute
            # is deprecated (removed in Python 3).
            error_messages.append('Error: %s' % e)

    if 'reindex' in request.POST:
        try:
            return handle_reindex(request)
        except ReindexError as e:
            error_messages.append('Error: %s' % e)

    if 'recreate_index' in request.POST:
        try:
            return handle_recreate_index(request)
        except ReindexError as e:
            error_messages.append('Error: %s' % e)

    if 'delete_index' in request.POST:
        try:
            return handle_delete(request)
        except DeleteError as e:
            error_messages.append('Error: %s' % e)
        except ES_EXCEPTIONS as e:
            error_messages.append('Error: {0}'.format(repr(e)))

    # Defaults in case the lookups below fail partway through.
    es_deets = None
    indexes = []

    try:
        # TODO: SUMO has a single ES_URL and that's the ZLB and does
        # the balancing. If that ever changes and we have multiple
        # ES_URLs, then this should get fixed.
        es_deets = requests.get(settings.ES_URLS[0]).json()
    except requests.exceptions.RequestException:
        # Best-effort: the page still renders without cluster details.
        pass

    # Per-index doctype stats; None marks an index we couldn't reach.
    stats = {}
    for index in all_read_indexes():
        try:
            stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            stats[index] = None

    write_stats = {}
    for index in all_write_indexes():
        try:
            write_stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            write_stats[index] = None

    try:
        indexes = get_indexes()
        indexes.sort(key=lambda m: m[0])
    except ES_EXCEPTIONS as e:
        error_messages.append('Error: {0}'.format(repr(e)))

    recent_records = Record.objects.all()[:100]
    outstanding_records = Record.objects.outstanding()

    # Every index group mentioned on either the read or write side.
    index_groups = set(settings.ES_INDEXES.keys())
    index_groups |= set(settings.ES_WRITE_INDEXES.keys())

    index_group_data = [[group, read_index(group), write_index(group)]
                        for group in index_groups]

    return render(
        request,
        'admin/search_maintenance.html',
        {
            'title': 'Search',
            'es_deets': es_deets,
            'doctype_stats': stats,
            'doctype_write_stats': write_stats,
            'indexes': indexes,
            'index_groups': index_groups,
            'index_group_data': index_group_data,
            'read_indexes': all_read_indexes,
            'write_indexes': all_write_indexes,
            'error_messages': error_messages,
            'recent_records': recent_records,
            'outstanding_records': outstanding_records,
            'now': datetime.now(),
            'read_index': read_index,
            'write_index': write_index,
        })
def search(request):
    """Render the admin view containing search tools.

    Dispatches the POSTed maintenance actions (reset, reindex,
    recreate_index, delete_index) first — each returns a redirect on
    success — then gathers ES cluster details, per-index doctype stats,
    outstanding chunk counts and recent indexing records for display.

    :arg request: the HttpRequest
    :returns: rendered ``admin/search_maintenance.html`` response, or
        the redirect returned by one of the action handlers
    :raises PermissionDenied: if the user lacks ``search.reindex``
    """
    if not request.user.has_perm("search.reindex"):
        raise PermissionDenied

    error_messages = []

    if "reset" in request.POST:
        try:
            return handle_reset(request)
        except ReindexError as e:
            # FIX: format the exception itself; the .message attribute
            # is deprecated (removed in Python 3).
            error_messages.append(u"Error: %s" % e)

    if "reindex" in request.POST:
        try:
            return handle_reindex(request)
        except ReindexError as e:
            error_messages.append(u"Error: %s" % e)

    if "recreate_index" in request.POST:
        try:
            return handle_recreate_index(request)
        except ReindexError as e:
            error_messages.append(u"Error: %s" % e)

    if "delete_index" in request.POST:
        try:
            return handle_delete(request)
        except DeleteError as e:
            error_messages.append(u"Error: %s" % e)
        except ES_EXCEPTIONS as e:
            error_messages.append("Error: {0}".format(repr(e)))

    # Defaults in case the lookups below fail partway through.
    es_deets = None
    indexes = []
    outstanding_chunks = None

    try:
        # TODO: SUMO has a single ES_URL and that's the ZLB and does
        # the balancing. If that ever changes and we have multiple
        # ES_URLs, then this should get fixed.
        es_deets = requests.get(settings.ES_URLS[0]).json()
    except requests.exceptions.RequestException:
        # Best-effort: the page still renders without cluster details.
        pass

    # Per-index doctype stats; None marks an index we couldn't reach.
    stats = {}
    for index in all_read_indexes():
        try:
            stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            stats[index] = None

    write_stats = {}
    for index in all_write_indexes():
        try:
            write_stats[index] = get_doctype_stats(index)
        except ES_EXCEPTIONS:
            write_stats[index] = None

    try:
        indexes = get_indexes()
        indexes.sort(key=lambda m: m[0])
    except ES_EXCEPTIONS as e:
        error_messages.append("Error: {0}".format(repr(e)))

    try:
        client = redis_client("default")
        outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS))
    except (RedisError, TypeError):
        # TypeError covers int(None) when the key is absent.
        pass

    recent_records = Record.objects.order_by("-starttime")[:100]
    outstanding_records = Record.objects.filter(endtime__isnull=True).order_by("-starttime")

    # Every index group mentioned on either the read or write side.
    index_groups = set(settings.ES_INDEXES.keys())
    index_groups |= set(settings.ES_WRITE_INDEXES.keys())

    index_group_data = [[group, read_index(group), write_index(group)]
                        for group in index_groups]

    return render(
        request,
        "admin/search_maintenance.html",
        {
            "title": "Search",
            "es_deets": es_deets,
            "doctype_stats": stats,
            "doctype_write_stats": write_stats,
            "indexes": indexes,
            "index_groups": index_groups,
            "index_group_data": index_group_data,
            "read_indexes": all_read_indexes,
            "write_indexes": all_write_indexes,
            "error_messages": error_messages,
            "recent_records": recent_records,
            "outstanding_records": outstanding_records,
            "outstanding_chunks": outstanding_chunks,
            "now": datetime.now(),
            "read_index": read_index,
            "write_index": write_index,
        },
    )
es_deets = requests.get(settings.ES_URLS[0]).json() except requests.exceptions.RequestException: pass try: stats = get_doctype_stats(read_index()) except ES_EXCEPTIONS: pass try: write_stats = get_doctype_stats(write_index()) except ES_EXCEPTIONS: pass try: indexes = get_indexes() indexes.sort(key=lambda m: m[0]) except ES_EXCEPTIONS as e: error_messages.append('Error: {0}'.format(repr(e))) try: client = redis_client('default') outstanding_chunks = int(client.get(OUTSTANDING_INDEX_CHUNKS)) except (RedisError, TypeError): pass recent_records = Record.uncached.order_by('-starttime')[:20] return render( request, 'admin/search_maintenance.html', { 'title': 'Search',