def search(self, query):
    """In-memory search over self.list_of_nodes by case-insensitive title match."""
    # TODO use search_parameters
    # TODO should parse_search_string be removed from nodes_collection?
    _, search_string = parse_search_string(query.search)

    def filter_func(raw_node):
        return len(search_string) == 0 or \
            search_string.upper() in raw_node['title'].upper()

    res = list(filter(filter_func, self.list_of_nodes))
    return {
        'list': res,
        'metadata': [
            {
                'total': len(res)
            },
        ],
    }
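# Hedged note on parse_search_string: judging only from its call sites here and in the
# DB helpers below, it appears to split a query into a dict of recognized parameters
# (e.g. 'author', 'sort', 'order', 'desc') and the remaining free-text string, e.g.:
#
#     search_parameters, search_string = parse_search_string('author:alice resnet')
#     # search_parameters -> {'author': 'alice'}   (assumed "key:value" token syntax)
#     # search_string     -> 'resnet'              (free text used for title / $text matching)
#
# The token syntax shown is an assumption; the exact parsing rules live in the helper itself.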
def get_db_nodes(status='', base_node_names=None, search='', per_page=20, offset=0, user_id=None):
    """Get subset of the Nodes.

    Args:
        status          (str, list of str, None):  Node Running Status
        base_node_names (str, list of str, None):  Base Node name(s) to filter by
        search          (str, None):               Search pattern
        per_page        (int):                     Number of Nodes per page
        offset          (int):                     Offset
        user_id:                                   Id of the requesting user; used to compute the `_readonly` flag

    Return:
        (dict, None)    Single document with the page of Nodes under 'list' and the total count under 'metadata'
    """
    if status and isinstance(status, str):
        status = [status]
    if base_node_names and isinstance(base_node_names, str):
        base_node_names = [base_node_names]

    aggregate_list = []
    search_parameters, search_string = parse_search_string(search)

    # Match
    and_query = {}
    if base_node_names:
        and_query['base_node_name'] = {'$in': base_node_names}
    if status:
        and_query['node_status'] = {'$in': status}
    if search_string:
        and_query['$text'] = {'$search': search_string}
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # Join with users
    aggregate_list.append({
        '$lookup': {
            'from': 'users',
            'localField': 'author',
            'foreignField': '_id',
            'as': '_user'
        }
    })
    # rm password hash
    aggregate_list.append({"$project": {
        "_user.password_hash": 0,
    }})

    # Match username
    and_query = {}
    if 'author' in search_parameters:
        and_query['_user.username'] = search_parameters['author']
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # sort
    sort_dict = OrderedDict()
    if 'sort' in search_parameters:
        # TODO more sort options
        if search_parameters['sort'] == 'starred':
            sort_dict['starred'] = -1
    sort_dict['insertion_date'] = -1
    aggregate_list.append({"$sort": sort_dict})

    # mark Nodes the requesting user does not own as read-only
    aggregate_list.append({
        "$addFields": {
            '_readonly': {'$ne': ["$author", to_object_id(user_id)]},
        }
    })

    # counts and pagination
    aggregate_list.append({
        '$facet': {
            "metadata": [{"$count": "total"}],
            "list": [{"$skip": int(offset)}, {"$limit": int(per_page)}],
        }
    })

    return next(get_db_connector().nodes.aggregate(aggregate_list), None)
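# Hedged usage sketch of get_db_nodes; assumes a configured MongoDB connection behind
# get_db_connector(). The status value, search text, and user id below are hypothetical.
current_user_id = '507f1f77bcf86cd799439011'   # hypothetical ObjectId string of the requesting user
res = get_db_nodes(
    status='READY',                  # hypothetical node_status value
    search='author:alice resnet',
    per_page=10,
    offset=0,
    user_id=current_user_id,
)
# The final $facet stage makes the aggregation yield a single document,
# so the result is a dict (or None), not a bare list of Nodes.
if res is not None:
    nodes = res['list']                                            # current page of Node documents
    total = res['metadata'][0]['total'] if res['metadata'] else 0  # total matches before pagination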
def get_db_graphs(search='', per_page=20, offset=0, status=None):
    """Get subset of the Graphs.

    Args:
        search   (str):                      Search pattern
        per_page (int):                      Number of Graphs per page
        offset   (int):                      Offset
        status   (str, list of str, None):   Graph Running Status

    Return:
        (dict, None)    Single document with the page of Graphs under 'list' and the total count under 'metadata'
    """
    if status and isinstance(status, str):
        status = [status]

    aggregate_list = []
    search_parameters, search_string = parse_search_string(search)

    # Match
    and_query = {}
    if status:
        and_query['graph_running_status'] = {'$in': status}
    if search_string:
        and_query['$text'] = {'$search': search_string}
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # Join with users
    aggregate_list.append({
        '$lookup': {
            'from': 'users',
            'localField': 'author',
            'foreignField': '_id',
            'as': '_user'
        }
    })
    # rm password hash
    aggregate_list.append({"$project": {
        "_user.password_hash": 0,
    }})

    # Match username
    and_query = {}
    if 'author' in search_parameters:
        and_query['_user.username'] = search_parameters['author']
    if len(and_query):
        aggregate_list.append({"$match": and_query})

    # sort
    sort_key = search_parameters.get('order', 'insertion_date')
    try:
        sort_order = -1 if strtobool(search_parameters.get('desc', '1')) else 1
    except ValueError:
        sort_order = -1
    aggregate_list.append({"$sort": {sort_key: sort_order}})

    # counts and pagination
    aggregate_list.append({
        '$facet': {
            "metadata": [{"$count": "total"}],
            "list": [{"$skip": int(offset)}, {"$limit": int(per_page)}],
        }
    })

    return next(get_db_connector().graphs.aggregate(aggregate_list), None)
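# The sort handling above differs from get_db_nodes: the direction comes from a 'desc'
# parameter run through distutils.util.strtobool, defaulting to descending. A small
# self-contained sketch of that fallback behavior (example values only):
from distutils.util import strtobool

for desc in ('1', '0', 'maybe'):
    try:
        sort_order = -1 if strtobool(desc) else 1
    except ValueError:
        # strtobool raises ValueError on unrecognized values ('maybe'), so the
        # query falls back to descending order, matching get_db_graphs above.
        sort_order = -1
    print(desc, '->', sort_order)   # '1' -> -1, '0' -> 1, 'maybe' -> -1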