def search(self, index=None, doc_type=None, body=None, params=None):
    """Return documents from the fake store in an Elasticsearch-style result.

    Filters the in-memory documents by ``index`` (normalized to a list of
    searchable indexes) and optionally by ``doc_type`` (a single type name or
    a list of them).  When ``params`` contains ``'scroll'``, a scroll id is
    generated, the scroll state is remembered in ``self.__scrolls``, and the
    hits are sliced to one page.

    :param index: index name(s) to search; normalized via
        ``self._normalize_index_to_list``.
    :param doc_type: document type name or list of names to keep; falsy
        means "all types".
    :param body: search body; stored verbatim in the scroll state.
    :param params: query-string parameters (``scroll``, ``size``, ``from``).
    :return: dict shaped like an Elasticsearch search response
        (``hits``, ``_shards``, ``took``, ``timed_out``).
    """
    # Fix: params defaults to None, but the scroll handling below does
    # membership tests and item access on it — that raised TypeError
    # whenever the caller omitted params.
    params = params if params is not None else {}

    searchable_indexes = self._normalize_index_to_list(index)

    matches = []
    for searchable_index in searchable_indexes:
        for document in self.__documents_dict[searchable_index]:
            if doc_type:
                # doc_type may be a list of acceptable types or one string.
                if isinstance(doc_type, list) and document.get('_type') not in doc_type:
                    continue
                if isinstance(doc_type, str) and document.get('_type') != doc_type:
                    continue
            matches.append(document)

    result = {
        'hits': {
            'total': len(matches),
            'max_score': 1.0
        },
        '_shards': {
            # Simulate indexes with 1 shard each
            'successful': len(searchable_indexes),
            'failed': 0,
            'total': len(searchable_indexes)
        },
        'took': 1,
        'timed_out': False
    }

    hits = []
    for match in matches:
        match['_score'] = 1.0
        hits.append(match)

    if 'scroll' in params:
        result['_scroll_id'] = str(get_random_scroll_id())
        params['size'] = int(params.get('size', 10))
        # NOTE(review): when the caller supplies 'from', it is advanced by
        # one page *before* slicing, so the stored scroll state points at
        # the next page — confirm this matches the scroll() consumer.
        params['from'] = int(
            params.get('from') + params.get('size') if 'from' in params else 0)
        self.__scrolls[result.get('_scroll_id')] = {
            'index': index,
            'doc_type': doc_type,
            'body': body,
            'params': params
        }
        hits = hits[params.get('from'):params.get('from') + params.get('size')]

    result['hits']['hits'] = hits
    return result
def search(self, index=None, doc_type=None, body=None, params=None):
    """Return documents from the fake store in an Elasticsearch-style result.

    Like the basic search, but additionally answers aggregation requests:
    for every entry under ``body['aggs']`` an empty bucket structure is
    returned (the fake does not actually aggregate).

    :param index: index name(s) to search; normalized via
        ``self._normalize_index_to_list``.
    :param doc_type: document type name or list of names to keep; falsy
        means "all types".
    :param body: search body; only its ``'aggs'`` key is inspected here,
        and it is stored verbatim in the scroll state.
    :param params: query-string parameters (``scroll``, ``size``, ``from``).
    :return: dict shaped like an Elasticsearch search response, with an
        ``'aggregations'`` key when aggregations were requested.
    """
    # Fix: params defaults to None, but the scroll handling below does
    # membership tests and item access on it — that raised TypeError
    # whenever the caller omitted params.
    params = params if params is not None else {}

    searchable_indexes = self._normalize_index_to_list(index)

    matches = []
    for searchable_index in searchable_indexes:
        for document in self.__documents_dict[searchable_index]:
            if doc_type:
                # doc_type may be a list of acceptable types or one string.
                if isinstance(doc_type, list) and document.get('_type') not in doc_type:
                    continue
                if isinstance(doc_type, str) and document.get('_type') != doc_type:
                    continue
            matches.append(document)

    result = {
        'hits': {
            'total': len(matches),
            'max_score': 1.0
        },
        '_shards': {
            # Simulate indexes with 1 shard each
            'successful': len(searchable_indexes),
            'failed': 0,
            'total': len(searchable_indexes)
        },
        'took': 1,
        'timed_out': False
    }

    hits = []
    for match in matches:
        match['_score'] = 1.0
        hits.append(match)

    # build aggregations — only the names are used, each gets empty buckets
    # (the aggregation definitions are ignored by this fake).
    if body is not None and 'aggs' in body:
        aggregations = {}
        for aggregation in body['aggs']:
            aggregations[aggregation] = {
                "doc_count_error_upper_bound": 0,
                "sum_other_doc_count": 0,
                "buckets": []
            }
        if aggregations:
            result['aggregations'] = aggregations

    if 'scroll' in params:
        result['_scroll_id'] = str(get_random_scroll_id())
        params['size'] = int(params.get('size', 10))
        # NOTE(review): when the caller supplies 'from', it is advanced by
        # one page *before* slicing, so the stored scroll state points at
        # the next page — confirm this matches the scroll() consumer.
        params['from'] = int(
            params.get('from') + params.get('size') if 'from' in params else 0)
        self.__scrolls[result.get('_scroll_id')] = {
            'index': index,
            'doc_type': doc_type,
            'body': body,
            'params': params
        }
        hits = hits[params.get('from'):params.get('from') + params.get('size')]

    result['hits']['hits'] = hits
    return result
def search(self, index=None, doc_type=None, body=None, params=None, headers=None):
    """Return documents from the fake store in an Elasticsearch-style result.

    This version evaluates fake query conditions built from
    ``body['query']``, converts data types in each match's ``_source``,
    supports sorting via ``body['sort']``, pagination via ``body['from']`` /
    ``body['size']`` or ``params['size']``, aggregation buckets via
    ``self.make_aggregation_buckets``, and scroll state via
    ``self.__scrolls``.  The response uses the modern ``hits.total``
    object form (``{'value': ..., 'relation': 'eq'}``).

    :param index: index name(s) to search; normalized via
        ``self._normalize_index_to_list``.
    :param doc_type: document type name or list of names to keep; falsy
        means "all types".
    :param body: search body (``query``, ``aggs``, ``sort``, ``from``,
        ``size``); may be None.
    :param params: query-string parameters (``scroll``, ``size``, ``from``).
    :param headers: accepted for API compatibility; unused here.
    :return: dict shaped like an Elasticsearch search response.
    """
    # Fix: params defaults to None, but the scroll/size handling below does
    # membership tests and item access on it — that raised TypeError
    # whenever the caller omitted params.
    params = params if params is not None else {}

    searchable_indexes = self._normalize_index_to_list(index)

    # Build one fake condition object per top-level query clause.
    conditions = []
    if body and 'query' in body:
        query = body['query']
        for query_type_str, condition in query.items():
            conditions.append(
                self._get_fake_query_condition(query_type_str, condition))

    matches = []
    for searchable_index in searchable_indexes:
        for document in self.__documents_dict[searchable_index]:
            if doc_type:
                # doc_type may be a list of acceptable types or one string.
                if isinstance(doc_type, list) and document.get('_type') not in doc_type:
                    continue
                if isinstance(doc_type, str) and document.get('_type') != doc_type:
                    continue
            if conditions:
                # A document matches if ANY condition evaluates true.
                for condition in conditions:
                    if condition.evaluate(document):
                        matches.append(document)
                        break
            else:
                matches.append(document)

    for match in matches:
        self._find_and_convert_data_types(match['_source'])

    result = {
        'hits': {
            'total': {
                'value': len(matches),
                'relation': 'eq'
            },
            'max_score': 1.0
        },
        '_shards': {
            # Simulate indexes with 1 shard each
            'successful': len(searchable_indexes),
            'skipped': 0,
            'failed': 0,
            'total': len(searchable_indexes)
        },
        'took': 1,
        'timed_out': False
    }

    hits = []
    for match in matches:
        match['_score'] = 1.0
        hits.append(match)

    # build aggregations
    if body is not None and 'aggs' in body:
        aggregations = {}
        for aggregation, definition in body['aggs'].items():
            aggregations[aggregation] = {
                "doc_count_error_upper_bound": 0,
                "sum_other_doc_count": 0,
                "buckets": self.make_aggregation_buckets(definition, matches)
            }
        if aggregations:
            result['aggregations'] = aggregations

    if body is not None and 'sort' in body:
        for key, value in body['sort'][0].items():
            if body['sort'][0][key]['order'] == 'desc':
                hits = sorted(hits, key=lambda k: k['_source'][key], reverse=True)
            else:
                hits = sorted(hits, key=lambda k: k['_source'][key])

    # Fix: the original tested "'from' in body" without first checking that
    # body is not None (the sort branch above has the guard) — that raised
    # TypeError for body=None.
    if body is not None and 'from' in body and 'size' in body and body['from'] + body['size'] > 0:
        hits = hits[body['from']:body['from'] + body['size']]

    if 'scroll' in params:
        result['_scroll_id'] = str(get_random_scroll_id())
        params['size'] = int(params.get('size', 10))
        # NOTE(review): when the caller supplies 'from', it is advanced by
        # one page *before* slicing, so the stored scroll state points at
        # the next page — confirm this matches the scroll() consumer.
        params['from'] = int(
            params.get('from') + params.get('size') if 'from' in params else 0)
        self.__scrolls[result.get('_scroll_id')] = {
            'index': index,
            'doc_type': doc_type,
            'body': body,
            'params': params
        }
        hits = hits[params.get('from'):params.get('from') + params.get('size')]
    elif 'size' in params:
        hits = hits[:int(params['size'])]

    result['hits']['hits'] = hits
    return result