def search_projects(self, request):
    """
    API Endpoint to quick search the current user's projects.

    TODO: This endpoint may need extending to search all public
    projects at a global level at some point. However, this means
    adding more context to the ProjectDocument search index. We are
    trying to keep them small and only add fields as required. For
    now this will suit our needs.

    DEPRECATED
    ----------
    This search has been removed from all designs and concepts along
    with the PRD. The code is kept in case the client decides to
    re-implement it at some point in the future.
    """
    query = Index(name='projects').search(ProjectDocument)
    if request.q:
        query = query.keywords(request.q)
    # Restrict to projects the current user is assigned to; cap at 1000.
    matches = query.filter(assigned_users=self.current_user.pk)[:1000]
    items = [self.project_index_mapper.map(doc) for doc in matches]
    return OneSearchProjectListResponse(items=items, is_list=True)
def put_document(document, index_name):
    """
    Put ``document`` into the search index named ``index_name``.

    TypeError and ValueError from the search API are logged and
    swallowed (best-effort put).

    Raises:
        PutError: If document failed to put.
    """
    index = Index(name=index_name)
    try:
        index.put(document)
    except (TypeError, ValueError) as err:
        logging.exception(err)
def tag_get(self, request):
    """
    API Endpoint to get a specific global tag.

    Raises:
        NotFoundException: If no tag exists for ``request.id``.
    """
    tag = Index(name='tags').get(
        str(request.id), document_class=AutoCompleteTagDocument)
    if not tag:
        raise NotFoundException(
            "Tag with id: {0} not found".format(request.id))
    return self.tag_document_mapper.map(tag)
def search_tags(self, request):
    """
    API Endpoint to search global tags.

    Returns a TagSplitListResponse with up to ``results_limit`` tags,
    split between tags attached to ``request.project_id`` (when given)
    and global tags from other projects. Project tags take priority;
    at least half the slots are reserved for global results.

    Object disposed after request is completed.
    """
    results_limit = 10
    project_search_results = []
    global_search_results = []
    i = Index(name='tags')
    search_query = i.search(AutoCompleteTagDocument)
    if request.q:
        search_query = search_query.filter(n_grams=request.q)
    if request.project_id:
        search_query = search_query.filter(project_ids=request.project_id)
        project_search_results = list(search_query[:results_limit])
        # get 5 global results or more if we don't have enough project
        # results
        # NOTE: use floor division so the slice bound stays an int on
        # Python 3 (true division would yield a float and break slicing).
        if len(project_search_results) < (results_limit // 2):
            remaining = results_limit - len(project_search_results)
        else:
            remaining = results_limit // 2
        # hack - need to get the search package to clone queries
        # shouldn't need to build up an entirely new object
        search_query = i.search(AutoCompleteTagDocument).filter(
            ~Q(project_ids=request.project_id))
        if request.q:
            search_query = search_query.filter(n_grams=request.q)
        # get global results
        global_search_results = list(search_query[:remaining])
    else:
        global_search_results = search_query[:results_limit]
    return TagSplitListResponse(
        project_tags=[
            self.tag_document_mapper.map(
                tag, project_id=request.project_id)
            for tag in project_search_results
        ],
        global_tags=[
            self.tag_document_mapper.map(tag)
            for tag in global_search_results
        ])
def delete_document(doc_id, index_name):
    """
    Tries to delete the given doc_id from the index.

    A ValueError is logged and swallowed. A DeleteError is logged and
    swallowed only when the document simply did not exist; any other
    delete failure is re-raised.
    """
    index = Index(name=index_name)
    try:
        index.delete(str(doc_id))
    except ValueError as err:
        logging.exception(err)
    except search_api.DeleteError as err:
        logging.exception(err)
        # only passing single docs here - so only a single result
        outcome = err.results[0]
        # if we aren't simply trying to delete a non-existant doc then raise
        if outcome.message != u'Not found':
            raise