Example #1
0
    def test_pipeline_builder(self):
        """Verify PipelineBuilder honours a preset pipeline and can be cleared."""
        # A builder constructed from a preset must expose exactly that pipeline.
        initial = Pipeline([{'find': 1}])
        builder = PipelineBuilder(initial)
        assert builder.pipeline == initial

        # After clear() the builder must hold an empty pipeline again.
        builder.clear()
        blank = Pipeline([])
        assert builder.pipeline == blank
Example #2
0
def search_framework(request_user: UserModel):
    """
    Execute a framework search and return the rendered results.

    GET requests carry the search parameters JSON-encoded in the 'query'
    argument; POST requests carry them in the request body. 'limit', 'skip'
    and 'resolve' are read from the query string in both cases.

    Args:
        request_user (UserModel): user who started this search

    Returns:
        Response with the search result, an empty 204 response when the
        aggregation fails, 400 on malformed parameters, 405 on an
        unsupported HTTP method.
    """
    try:
        limit = request.args.get('limit', Search.DEFAULT_LIMIT, int)
        skip = request.args.get('skip', Search.DEFAULT_SKIP, int)
        only_active = _fetch_only_active_objs()
        # Raw JSON string until decoded below; default to an empty query.
        search_params: str = request.args.get('query') or '{}'
        # Query-string values arrive as strings, so '?resolve=false' would
        # be truthy — parse the flag explicitly instead of using it as-is.
        resolve_object_references: bool = str(
            request.args.get('resolve', False)).lower() in ('true', '1')
    except ValueError as err:
        return abort(400, err)
    try:
        if request.method == 'GET':
            search_params = json.loads(search_params)
        elif request.method == 'POST':
            search_params = json.loads(request.data)
        else:
            return abort(405)
    except Exception as err:
        LOGGER.error(f'[Search Framework]: {err}')
        return abort(400, err)
    try:
        builder = PipelineBuilder()
        search_parameters = SearchParam.from_request(search_params)

        query: Pipeline = builder.build(search_parameters, object_manager,
                                        only_active)

        searcher = SearcherFramework(manager=object_manager)
        result = searcher.aggregate(pipeline=query,
                                    request_user=request_user,
                                    limit=limit,
                                    skip=skip,
                                    resolve=resolve_object_references,
                                    active=only_active)

    except Exception as err:
        # Best-effort: a failed aggregation yields an empty 204 response
        # instead of an error so clients can render "no results".
        LOGGER.error(f'[Search Framework Rest]: {err}')
        return make_response([], 204)

    return make_response(result)
Example #3
0
def quick_search_result_counter(request_user: User):
    """Count the objects whose field values match the quick-search term."""
    search_term = request.args.get('searchValue', Search.DEFAULT_REGEX, str)

    builder = PipelineBuilder()
    value_regex = builder.regex_('fields.value', f'{search_term}', 'ims')
    builder.add_pipe(builder.match_(value_regex))
    builder.add_pipe(builder.count_('count'))

    try:
        aggregation = object_manager.aggregate(collection='framework.objects',
                                               pipeline=builder.pipeline)
        result = list(aggregation)
    except Exception as err:
        LOGGER.error(f'[Search count]: {err}')
        return abort(400)
    # An empty aggregation result means zero matches.
    if result:
        return make_response(result[0]['count'])
    return make_response(0)
Example #4
0
    def aggregate(self,
                  pipeline: Pipeline,
                  request_user: UserModel = None,
                  limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP,
                  **kwargs) -> SearchResult[RenderResult]:
        """
        Use mongodb aggregation system with pipeline queries
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (UserModel): user who started this search
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs: 'resolve' (default False) additionally pulls in objects
                that reference the matched ones; 'active' (default True)
                restricts that reference lookup to active objects
        Returns:
            SearchResult with generic list of RenderResults
        """
        # Insert skip and limit
        plb = PipelineBuilder(pipeline)

        # define search output
        stages: dict = {}
        active = kwargs.get('active', True)

        if kwargs.get('resolve', False):
            # Join in every object whose field values contain a matched
            # object's public_id (i.e. objects referencing the matches).
            plb.add_pipe(
                plb.lookup_sub_(from_='framework.objects',
                                let_={'ref_id': '$public_id'},
                                pipeline_=[
                                    plb.match_({
                                        '$expr': {
                                            '$in':
                                            ['$$ref_id', '$fields.value']
                                        }
                                    })
                                ],
                                as_='refs'))
            # Sub-pipeline for the reference lookup in the facet below; with
            # 'active' set, inactive referencing objects are filtered out.
            if active:
                active_pipe = [{
                    '$match': {
                        'active': {
                            "$eq": True
                        }
                    }
                }, {
                    '$match': {
                        '$expr': {
                            '$in': ['$$ref_id', '$fields.value']
                        }
                    }
                }]
            else:
                active_pipe = [{
                    '$match': {
                        '$expr': {
                            '$in': ['$$ref_id', '$fields.value']
                        }
                    }
                }]
            # Facet into the original matches ('root') and the objects
            # referencing them ('references'), each as its own array.
            plb.add_pipe(
                plb.facet_({
                    'root': [{
                        '$replaceRoot': {
                            'newRoot': {
                                '$mergeObjects': ['$$ROOT']
                            }
                        }
                    }],
                    'references': [{
                        '$lookup': {
                            'from': 'framework.objects',
                            'let': {
                                'ref_id': '$public_id'
                            },
                            'pipeline': active_pipe,
                            'as': 'refs'
                        }
                    }, {
                        '$unwind': '$refs'
                    }, {
                        '$replaceRoot': {
                            'newRoot': '$refs'
                        }
                    }]
                }))
            # Concatenate both facet arrays and flatten back to a single
            # document stream so the paging stages below see one result set.
            plb.add_pipe(
                plb.project_(specification={
                    'complete': {
                        '$concatArrays': ['$root', '$references']
                    }
                }))
            plb.add_pipe(plb.unwind_(path='$complete'))
            plb.add_pipe({'$replaceRoot': {'newRoot': '$complete'}})

        # 'metadata' counts all matches; 'data' applies the paging window.
        stages.update({'metadata': [PipelineBuilder.count_('total')]})
        stages.update({
            'data':
            [PipelineBuilder.skip_(skip),
             PipelineBuilder.limit_(limit)]
        })

        # 'group' aggregates match counts per object type, joining in the
        # type label for display.
        group_stage: dict = {
            'group': [
                PipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id',
                                        'public_id', 'lookup_data'),
                PipelineBuilder.unwind_('$lookup_data'),
                PipelineBuilder.project_({
                    '_id': 0,
                    'type_id': 1,
                    'label': '$lookup_data.label'
                }),
                PipelineBuilder.group_('$$ROOT.type_id', {
                    'types': {
                        '$first': '$$ROOT'
                    },
                    'total': {
                        '$sum': 1
                    }
                }),
                PipelineBuilder.project_({
                    '_id': 0,
                    'searchText': '$types.label',
                    'searchForm': 'type',
                    'searchLabel': '$types.label',
                    'settings': {
                        'types': ['$types.type_id']
                    },
                    'total': 1
                }),
                PipelineBuilder.sort_('total', -1)
            ]
        }
        stages.update(group_stage)
        plb.add_pipe(PipelineBuilder.facet_(stages))

        raw_search_result = self.manager.aggregate(
            collection=CmdbObject.COLLECTION, pipeline=plb.pipeline)
        raw_search_result_list = list(raw_search_result)

        # Regex pipe values are reported back so matches can be highlighted.
        try:
            matches_regex = plb.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        if len(raw_search_result_list[0]['data']) > 0:
            raw_search_result_list_entry = raw_search_result_list[0]
            # parse result list
            pre_rendered_result_list = [
                CmdbObject(**raw_result)
                for raw_result in raw_search_result_list_entry['data']
            ]
            rendered_result_list = RenderList(
                pre_rendered_result_list,
                request_user,
                object_manager=self.manager).render_result_list()

            total_results = raw_search_result_list_entry['metadata'][0].get(
                'total', 0)
            group_result_list = raw_search_result_list[0]['group']

        else:
            # No matches: return an empty, zero-total result.
            rendered_result_list = []
            group_result_list = []
            total_results = 0
        # generate output
        search_result = SearchResult[RenderResult](
            results=rendered_result_list,
            total_results=total_results,
            groups=group_result_list,
            alive=raw_search_result.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip)
        return search_result
Example #5
0
    def aggregate(self,
                  pipeline: Pipeline,
                  request_user: User = None,
                  limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP,
                  **kwargs) -> SearchResult[RenderResult]:
        """
        Run the given pipeline through the mongodb aggregation framework
        and wrap the outcome in a SearchResult.
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (User): user who started this search
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs:
        Returns:
            SearchResult with generic list of RenderResults
        """
        builder = PipelineBuilder(pipeline)

        # Single $facet stage: one branch counts all matches, the other
        # applies the skip/limit paging window.
        facet_stages: dict = {
            'metadata': [PipelineBuilder.count_('total')],
            'data': [PipelineBuilder.skip_(skip),
                     PipelineBuilder.limit_(limit)]
        }
        builder.add_pipe(PipelineBuilder.facet_(facet_stages))

        cursor = self.manager.aggregate(collection=CmdbObject.COLLECTION,
                                        pipeline=builder.pipeline)
        raw_results = list(cursor)
        # Regex pipe values are reported back for match highlighting.
        try:
            matches_regex = builder.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        entry = raw_results[0]
        if entry['data']:
            # Parse the raw documents and render them for the request user.
            parsed_objects = [CmdbObject(**document)
                              for document in entry['data']]
            rendered_results = RenderList(
                parsed_objects,
                request_user,
                object_manager=self.manager).render_result_list()
            total_results = entry['metadata'][0].get('total', 0)
        else:
            rendered_results = []
            total_results = 0

        return SearchResult[RenderResult](
            results=rendered_results,
            total_results=total_results,
            alive=cursor.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip)
Example #6
0
def quick_search_result_counter():
    """Count active/inactive/total objects matching the quick-search term."""
    search_term = request.args.get('searchValue', Search.DEFAULT_REGEX, str)

    builder = PipelineBuilder()
    value_regex = builder.regex_('fields.value', f'{search_term}', 'ims')
    # Restrict to active objects only when the corresponding setting is on.
    activity_filter = {'active': {"$eq": True}} if _fetch_only_active_objs() else {}
    builder.add_pipe(builder.match_(builder.and_([value_regex, activity_filter])))

    aggregation_stages = [
        # Count matches per activity state.
        {'$group': {"_id": {'active': '$active'}, 'count': {'$sum': 1}}},
        # Collect the per-state counts into one document with a grand total.
        {'$group': {
            '_id': 0,
            'levels': {'$push': {'_id': '$_id.active', 'count': '$count'}},
            'total': {'$sum': '$count'}
        }},
        # Sort the per-state entries so 'active' (True) comes first.
        {'$unwind': '$levels'},
        {'$sort': {"levels._id": -1}},
        {'$group': {
            '_id': 0,
            'levels': {'$push': {'count': "$levels.count"}},
            "total": {'$avg': '$total'}
        }},
        # Pull the (possibly missing) active/inactive entries out of the array.
        {'$project': {
            'total': "$total",
            'active': {'$arrayElemAt': ["$levels", 0]},
            'inactive': {'$arrayElemAt': ["$levels", 1]}
        }},
        # Default missing entries to a count of 0.
        {'$project': {
            '_id': 0,
            'active': {'$cond': [{'$ifNull': ["$active", False]}, '$active.count', 0]},
            'inactive': {'$cond': [{'$ifNull': ['$inactive', False]}, '$inactive.count', 0]},
            'total': '$total'
        }}
    ]
    for stage in aggregation_stages:
        builder.add_pipe(stage)

    try:
        result = list(object_manager.aggregate(collection='framework.objects',
                                               pipeline=builder.pipeline))
    except Exception as err:
        LOGGER.error(f'[Search count]: {err}')
        return abort(400)
    if result:
        return make_response(result[0])
    return make_response({'active': 0, 'inactive': 0, 'total': 0})
Example #7
0
    def aggregate(self, pipeline: Pipeline, request_user: User = None, limit: int = Search.DEFAULT_LIMIT,
                  skip: int = Search.DEFAULT_SKIP, **kwargs) -> SearchResult[RenderResult]:
        """
        Run the given pipeline through the mongodb aggregation framework.
        Args:
            pipeline (Pipeline): list of requirement pipes
            request_user (User): user who started this search
            limit (int): max number of documents to return
            skip (int): number of documents to be skipped
            **kwargs:
        Returns:
            SearchResult with generic list of RenderResults
        """
        builder = PipelineBuilder(pipeline)

        # Per-type grouping branch: join the type label and count matches
        # per object type, sorted by frequency.
        group_branch = [
            PipelineBuilder.lookup_(TypeModel.COLLECTION, 'type_id', 'public_id', 'lookup_data'),
            PipelineBuilder.unwind_('$lookup_data'),
            PipelineBuilder.project_({'_id': 0, 'type_id': 1, 'label': "$lookup_data.label"}),
            PipelineBuilder.group_("$$ROOT.type_id", {'types': {'$first': "$$ROOT"}, 'total': {'$sum': 1}}),
            PipelineBuilder.project_(
                {'_id': 0,
                 'searchText': '$types.label',
                 'searchForm': 'type',
                 'searchLabel': '$types.label',
                 'settings': {'types': ['$types.type_id']},
                 'total': 1
                 }),
            PipelineBuilder.sort_("total", -1)
        ]
        # Three parallel $facet branches: total count, the requested page,
        # and the per-type grouping above.
        facet_stages: dict = {
            'metadata': [PipelineBuilder.count_('total')],
            'data': [PipelineBuilder.skip_(skip), PipelineBuilder.limit_(limit)],
            'group': group_branch
        }
        builder.add_pipe(PipelineBuilder.facet_(facet_stages))

        cursor = self.manager.aggregate(collection=CmdbObject.COLLECTION, pipeline=builder.pipeline)
        raw_results = list(cursor)
        # Regex pipe values are reported back for match highlighting.
        try:
            matches_regex = builder.get_regex_pipes_values()
        except Exception as err:
            LOGGER.error(f'Extract regex pipes: {err}')
            matches_regex = []

        entry = raw_results[0]
        if entry['data']:
            # Parse the raw documents and render them for the request user.
            parsed_objects = [CmdbObject(**document) for document in entry['data']]
            rendered_results = RenderList(parsed_objects, request_user,
                                          object_manager=self.manager).render_result_list()
            total_results = entry['metadata'][0].get('total', 0)
            group_results = entry['group']
        else:
            rendered_results = []
            group_results = []
            total_results = 0

        return SearchResult[RenderResult](
            results=rendered_results,
            total_results=total_results,
            groups=group_results,
            alive=cursor.alive,
            matches_regex=matches_regex,
            limit=limit,
            skip=skip
        )