def quick_search_result_counter(request_user: User):
    """Return the number of objects whose field values match the quick-search term.

    Reads ``searchValue`` from the request query string (falling back to the
    default search regex) and runs a match/count aggregation over all objects.

    Args:
        request_user (User): user issuing the request (not used in the query itself).

    Returns:
        HTTP response carrying the match count, ``0`` when nothing matched,
        or a 400 abort when the aggregation fails.
    """
    search_value = request.args.get('searchValue', Search.DEFAULT_REGEX, str)
    builder = PipelineBuilder()
    # Case-insensitive ('i'), multiline ('m'), dotall ('s') regex over every field value.
    regex_query = builder.regex_('fields.value', f'{search_value}', 'ims')
    builder.add_pipe(builder.match_(regex_query))
    builder.add_pipe(builder.count_('count'))
    try:
        counter_result = list(
            object_manager.aggregate(collection='framework.objects',
                                     pipeline=builder.pipeline))
    except Exception as err:
        LOGGER.error(f'[Search count]: {err}')
        return abort(400)
    # $count emits no document at all when the match stage found nothing.
    if not counter_result:
        return make_response(0)
    return make_response(counter_result[0]['count'])
def aggregate(self, pipeline: Pipeline, request_user: UserModel = None,
              limit: int = Search.DEFAULT_LIMIT, skip: int = Search.DEFAULT_SKIP,
              **kwargs) -> SearchResult[RenderResult]:
    """
    Use mongodb aggregation system with pipeline queries

    Args:
        pipeline (Pipeline): list of requirement pipes
        request_user (UserModel): user who started this search
        limit (int): max number of documents to return
        skip (int): number of documents to be skipped
        **kwargs: supported flags: ``resolve`` (bool, default False) to pull in
            objects that reference the matches; ``active`` (bool, default True)
            to restrict resolved references to active objects
    Returns:
        SearchResult with generic list of RenderResults
    """
    # Insert skip and limit
    plb = PipelineBuilder(pipeline)
    # define search output
    stages: dict = {}
    active = kwargs.get('active', True)
    if kwargs.get('resolve', False):
        # Attach, to every match, the objects whose field values contain its
        # public_id (i.e. objects referencing this one) as 'refs'.
        plb.add_pipe(
            plb.lookup_sub_(from_='framework.objects',
                            let_={'ref_id': '$public_id'},
                            pipeline_=[
                                plb.match_({
                                    '$expr': {
                                        '$in': ['$$ref_id', '$fields.value']
                                    }
                                })
                            ],
                            as_='refs'))
        # Sub-pipeline for the facet lookup below: with 'active' set, only
        # active referencing objects are considered.
        if active:
            active_pipe = [{
                '$match': {
                    'active': {
                        "$eq": True
                    }
                }
            }, {
                '$match': {
                    '$expr': {
                        '$in': ['$$ref_id', '$fields.value']
                    }
                }
            }]
        else:
            active_pipe = [{
                '$match': {
                    '$expr': {
                        '$in': ['$$ref_id', '$fields.value']
                    }
                }
            }]
        # Two facets: 'root' keeps the original matches, 'references' re-runs
        # the reference lookup and flattens each referencing object to a
        # top-level document.
        plb.add_pipe(
            plb.facet_({
                'root': [{
                    '$replaceRoot': {
                        'newRoot': {
                            '$mergeObjects': ['$$ROOT']
                        }
                    }
                }],
                'references': [{
                    '$lookup': {
                        'from': 'framework.objects',
                        'let': {
                            'ref_id': '$public_id'
                        },
                        'pipeline': active_pipe,
                        'as': 'refs'
                    }
                }, {
                    '$unwind': '$refs'
                }, {
                    '$replaceRoot': {
                        'newRoot': '$refs'
                    }
                }]
            }))
        # Merge both facets back into a single flat document stream.
        plb.add_pipe(
            plb.project_(specification={
                'complete': {
                    '$concatArrays': ['$root', '$references']
                }
            }))
        plb.add_pipe(plb.unwind_(path='$complete'))
        plb.add_pipe({'$replaceRoot': {'newRoot': '$complete'}})
    # Final facet: total count, requested page, and a per-type group summary.
    stages.update({'metadata': [PipelineBuilder.count_('total')]})
    stages.update({
        'data': [PipelineBuilder.skip_(skip), PipelineBuilder.limit_(limit)]
    })
    # Group results by type_id, joining the type label for display, sorted by
    # number of hits per type (descending).
    group_stage: dict = {
        'group': [
            PipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id',
                                    'public_id', 'lookup_data'),
            PipelineBuilder.unwind_('$lookup_data'),
            PipelineBuilder.project_({
                '_id': 0,
                'type_id': 1,
                'label': '$lookup_data.label'
            }),
            PipelineBuilder.group_('$$ROOT.type_id', {
                'types': {
                    '$first': '$$ROOT'
                },
                'total': {
                    '$sum': 1
                }
            }),
            PipelineBuilder.project_({
                '_id': 0,
                'searchText': '$types.label',
                'searchForm': 'type',
                'searchLabel': '$types.label',
                'settings': {
                    'types': ['$types.type_id']
                },
                'total': 1
            }),
            PipelineBuilder.sort_('total', -1)
        ]
    }
    stages.update(group_stage)
    plb.add_pipe(PipelineBuilder.facet_(stages))
    raw_search_result = self.manager.aggregate(
        collection=CmdbObject.COLLECTION, pipeline=plb.pipeline)
    raw_search_result_list = list(raw_search_result)
    # Regex values are echoed back so the client can highlight matches;
    # extraction failure is non-fatal.
    try:
        matches_regex = plb.get_regex_pipes_values()
    except Exception as err:
        LOGGER.error(f'Extract regex pipes: {err}')
        matches_regex = []
    # NOTE(review): $facet always yields exactly one document, so indexing [0]
    # is safe here — confirm against the manager's aggregate contract.
    if len(raw_search_result_list[0]['data']) > 0:
        raw_search_result_list_entry = raw_search_result_list[0]
        # parse result list
        pre_rendered_result_list = [
            CmdbObject(**raw_result)
            for raw_result in raw_search_result_list_entry['data']
        ]
        rendered_result_list = RenderList(
            pre_rendered_result_list, request_user,
            object_manager=self.manager).render_result_list()
        total_results = raw_search_result_list_entry['metadata'][0].get(
            'total', 0)
        group_result_list = raw_search_result_list[0]['group']
    else:
        rendered_result_list = []
        group_result_list = []
        total_results = 0
    # generate output
    search_result = SearchResult[RenderResult](
        results=rendered_result_list,
        total_results=total_results,
        groups=group_result_list,
        alive=raw_search_result.alive,
        matches_regex=matches_regex,
        limit=limit,
        skip=skip)
    return search_result
def aggregate(self, pipeline: Pipeline, request_user: User = None,
              limit: int = Search.DEFAULT_LIMIT, skip: int = Search.DEFAULT_SKIP,
              **kwargs) -> SearchResult[RenderResult]:
    """Run the given pipeline through MongoDB's aggregation system.

    Args:
        pipeline (Pipeline): list of requirement pipes
        request_user (User): user who started this search
        limit (int): max number of documents to return
        skip (int): number of documents to be skipped
        **kwargs: unused here

    Returns:
        SearchResult with generic list of RenderResults
    """
    builder = PipelineBuilder(pipeline)
    # One $facet pass produces both the total count ('metadata') and the
    # requested page of documents ('data').
    output_stages: dict = {
        'metadata': [PipelineBuilder.count_('total')],
        'data': [PipelineBuilder.skip_(skip), PipelineBuilder.limit_(limit)]
    }
    builder.add_pipe(PipelineBuilder.facet_(output_stages))
    cursor = self.manager.aggregate(collection=CmdbObject.COLLECTION,
                                    pipeline=builder.pipeline)
    facet_documents = list(cursor)
    # Regex values are echoed back for client-side highlighting; a failure
    # here must not break the search itself.
    try:
        matches_regex = builder.get_regex_pipes_values()
    except Exception as err:
        LOGGER.error(f'Extract regex pipes: {err}')
        matches_regex = []
    first_facet = facet_documents[0]
    if first_facet['data']:
        # Rebuild objects from the raw documents and render them for output.
        found_objects = [CmdbObject(**raw) for raw in first_facet['data']]
        rendered = RenderList(found_objects, request_user,
                              object_manager=self.manager).render_result_list()
        total = first_facet['metadata'][0].get('total', 0)
    else:
        rendered = []
        total = 0
    return SearchResult[RenderResult](
        results=rendered,
        total_results=total,
        alive=cursor.alive,
        matches_regex=matches_regex,
        limit=limit,
        skip=skip)
def quick_search_result_counter():
    """Count quick-search matches split into active / inactive / total.

    Reads ``searchValue`` from the request query string and returns a JSON-able
    dict ``{'active': int, 'inactive': int, 'total': int}``.
    """
    regex = request.args.get('searchValue', Search.DEFAULT_REGEX, str)
    plb = PipelineBuilder()
    # Case-insensitive ('i'), multiline ('m'), dotall ('s') regex over field values.
    regex = plb.regex_('fields.value', f'{regex}', 'ims')
    # Optionally restrict to active objects; the empty dict is a no-op clause.
    pipe_and = plb.and_([
        regex,
        {
            'active': {
                "$eq": True
            }
        } if _fetch_only_active_objs() else {}
    ])
    pipe_match = plb.match_(pipe_and)
    plb.add_pipe(pipe_match)
    # Count matches per activation state (one bucket per distinct 'active' value).
    plb.add_pipe(
        {'$group': {
            "_id": {
                'active': '$active'
            },
            'count': {
                '$sum': 1
            }
        }})
    # Collapse the buckets into a single document with per-state counts and a total.
    plb.add_pipe({
        '$group': {
            '_id': 0,
            'levels': {
                '$push': {
                    '_id': '$_id.active',
                    'count': '$count'
                }
            },
            'total': {
                '$sum': '$count'
            }
        }
    })
    # Re-expand and sort so the active bucket (True) always precedes inactive.
    plb.add_pipe({'$unwind': '$levels'})
    plb.add_pipe({'$sort': {"levels._id": -1}})
    plb.add_pipe({
        '$group': {
            '_id': 0,
            'levels': {
                '$push': {
                    'count': "$levels.count"
                }
            },
            "total": {
                '$avg': '$total'
            }
        }
    })
    # levels[0] = active bucket, levels[1] = inactive bucket (either may be absent).
    plb.add_pipe({
        '$project': {
            'total': "$total",
            'active': {
                '$arrayElemAt': ["$levels", 0]
            },
            'inactive': {
                '$arrayElemAt': ["$levels", 1]
            }
        }
    })
    # Missing buckets become 0 via $ifNull guards.
    plb.add_pipe({
        '$project': {
            '_id': 0,
            'active': {
                '$cond': [{
                    '$ifNull': ["$active", False]
                }, '$active.count', 0]
            },
            'inactive': {
                '$cond': [{
                    '$ifNull': ['$inactive', False]
                }, '$inactive.count', 0]
            },
            'total': '$total'
        }
    })
    pipeline = plb.pipeline
    try:
        result = list(
            object_manager.aggregate(collection='framework.objects',
                                     pipeline=pipeline))
    except Exception as err:
        LOGGER.error(f'[Search count]: {err}')
        return abort(400)
    if len(result) > 0:
        return make_response(result[0])
    else:
        # No matches at all: the pipeline emits no document, so answer with zeros.
        return make_response({'active': 0, 'inactive': 0, 'total': 0})
def aggregate(self, pipeline: Pipeline, request_user: User = None,
              limit: int = Search.DEFAULT_LIMIT, skip: int = Search.DEFAULT_SKIP,
              **kwargs) -> SearchResult[RenderResult]:
    """Run the given pipeline through MongoDB's aggregation system.

    Args:
        pipeline (Pipeline): list of requirement pipes
        request_user (User): user who started this search
        limit (int): max number of documents to return
        skip (int): number of documents to be skipped
        **kwargs: unused here

    Returns:
        SearchResult with generic list of RenderResults
    """
    builder = PipelineBuilder(pipeline)
    # Single $facet pass: total count ('metadata'), the requested page
    # ('data'), and a per-type hit summary ('group') with the type label
    # joined in, sorted by hits descending.
    output_stages: dict = {
        'metadata': [PipelineBuilder.count_('total')],
        'data': [
            PipelineBuilder.skip_(skip),
            PipelineBuilder.limit_(limit)
        ],
        'group': [
            PipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id',
                                    'public_id', 'lookup_data'),
            PipelineBuilder.unwind_('$lookup_data'),
            PipelineBuilder.project_({'_id': 0, 'type_id': 1,
                                      'label': "$lookup_data.label"}),
            PipelineBuilder.group_("$$ROOT.type_id",
                                   {'types': {'$first': "$$ROOT"},
                                    'total': {'$sum': 1}}),
            PipelineBuilder.project_({'_id': 0,
                                      'searchText': '$types.label',
                                      'searchForm': 'type',
                                      'searchLabel': '$types.label',
                                      'settings': {'types': ['$types.type_id']},
                                      'total': 1
                                      }),
            PipelineBuilder.sort_("total", -1)
        ]
    }
    builder.add_pipe(PipelineBuilder.facet_(output_stages))
    cursor = self.manager.aggregate(collection=CmdbObject.COLLECTION,
                                    pipeline=builder.pipeline)
    facet_documents = list(cursor)
    # Regex values are echoed back for client-side highlighting; a failure
    # here must not break the search itself.
    try:
        matches_regex = builder.get_regex_pipes_values()
    except Exception as err:
        LOGGER.error(f'Extract regex pipes: {err}')
        matches_regex = []
    first_facet = facet_documents[0]
    if first_facet['data']:
        # Rebuild objects from the raw documents and render them for output.
        found_objects = [CmdbObject(**raw) for raw in first_facet['data']]
        rendered = RenderList(found_objects, request_user,
                              object_manager=self.manager).render_result_list()
        total = first_facet['metadata'][0].get('total', 0)
        groups = first_facet['group']
    else:
        rendered = []
        groups = []
        total = 0
    return SearchResult[RenderResult](
        results=rendered,
        total_results=total,
        groups=groups,
        alive=cursor.alive,
        matches_regex=matches_regex,
        limit=limit,
        skip=skip
    )