def _build_body_query(query_params, search_params, classes, search_fields,
                      response_fields, analyzer=settings.ES_ANALYZER):
    # Tokenize the pattern, dropping bare hyphens
    patterns = search_params["pattern"].lower().split()
    patterns = [item for item in patterns if item != '-']
    # AND-joined prefix wildcards, e.g. "rio* AND de*"
    query_string = "* AND ".join(patterns) + "*"
    body = {
        "from": int(resources.calculate_offset(query_params)),
        "size": int(query_params.get("per_page", settings.DEFAULT_PER_PAGE)),
        "fields": response_fields,
        "query": {
            "bool": {
                "must": {
                    "query_string": {
                        "fields": search_fields,
                        "query": query_string,
                        "analyze_wildcard": True,
                        "analyzer": analyzer
                    }
                },
                # Boost documents that match the whole pattern as a phrase
                "should": {
                    "query_string": {
                        "fields": search_fields,
                        "query": u'"{0}"'.format(query_string),
                        "analyze_wildcard": True,
                        "analyzer": "default"
                    }
                }
            }
        },
        "filter": _build_type_filters(classes)
    }
    return body
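# A minimal, runnable sketch (the names below are illustrative, not part of
# this module) of how the wildcard query string above is assembled: each
# token keeps a trailing "*" and tokens are joined with AND, so the search
# is a prefix match on every word of the pattern.
def _example_query_string(pattern):
    patterns = [item for item in pattern.lower().split() if item != '-']
    return "* AND ".join(patterns) + "*"

assert _example_query_string("Rio de Janeiro") == "rio* AND de* AND janeiro*"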
def _build_body_query_compatible_with_uatu_and_es_19_in_envs(
        query_params, tokens, classes, search_fields, response_fields, pattern):
    should_list = []
    # One exact-phrase clause per analyzed token
    for token in tokens:
        token_item = token["token"]
        should_item = {
            "query_string": {
                "query": '"{0}"'.format(token_item),
                "fields": search_fields
            }
        }
        should_list.append(should_item)
    # One prefix-wildcard clause per searchable field
    pattern = "*".join(pattern.split()).lower()
    for field in search_fields:
        should_item = {"wildcard": {str(field): "{0}*".format(pattern)}}
        should_list.append(should_item)
    body = {
        "from": int(resources.calculate_offset(query_params)),
        "size": int(query_params.get("per_page", settings.DEFAULT_PER_PAGE)),
        "fields": response_fields,
        "query": {
            "bool": {
                "should": should_list,
                # At least len(tokens) of the clauses above must match
                "minimum_should_match": len(tokens)
            }
        },
        "filter": _build_type_filters(classes)
    }
    return body
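# Hedged illustration (inputs assumed) of the clause layout built above. For
# tokens [{"token": "rio"}], search_fields ["label"] and pattern "rio",
# should_list holds one exact-phrase clause per token plus one prefix-wildcard
# clause per field, and "minimum_should_match": len(tokens) requires at least
# that many of these clauses to match before a document counts as a hit.
example_should_list = [
    {"query_string": {"query": '"rio"', "fields": ["label"]}},  # token clause
    {"wildcard": {"label": "rio*"}},                            # field clause
]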
def do_search_query(query_params, search_fields, analyzer=settings.ES_ANALYZER):
    elasticsearch_query_dict = {
        "filter": {
            "type": {
                "value": query_params["class_uri"]
            }
        },
        "query": {
            "multi_match": {
                "query": "{0}".format(query_params["pattern"]),
                "fields": search_fields,
                "analyzer": analyzer,
                "fuzziness": 0.7  # based on manual tests
            }
        },
        "from": int(resources.calculate_offset(query_params)),
        "size": int(query_params.get("per_page", settings.DEFAULT_PER_PAGE)),
    }
    indexes = ["semantica." + uri_to_slug(query_params["graph_uri"])]
    return run_search(elasticsearch_query_dict, indexes=indexes)
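# Hypothetical call sketch for do_search_query; the values below are
# assumptions, not taken from this codebase. query_params must behave like a
# dict exposing "pattern", "class_uri", "graph_uri" and the pagination keys
# read by resources.calculate_offset.
#
#     query_params = {
#         "pattern": "rio",
#         "class_uri": "http://example.onto/City",
#         "graph_uri": "http://example.onto/place/",
#         "page": "1",
#         "per_page": "10",
#     }
#     result = do_search_query(query_params, search_fields=["rdfs:label"])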
def get_stored_queries(params):
    offset = int(calculate_offset(params))
    per_page = int(params.get("per_page", DEFAULT_PER_PAGE))
    stored_queries_result = get_all_instances_from_type(
        ES_INDEX_NAME, ES_TYPE_NAME, offset, per_page)
    response_dict = _get_response_dict(stored_queries_result, params)
    return response_dict
def query_classes_list(query_params):
    # Inject the computed offset only for template expansion, then remove it
    # so query_params is left unchanged for later use
    offset = calculate_offset(query_params)
    query_params['offset'] = offset
    query = QUERY_ALL_CLASSES_OF_A_GRAPH % query_params
    del query_params['offset']
    return triplestore.query_sparql(query, query_params.triplestore_config)
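# Hedged illustration of the "%" expansion above. The actual template is not
# shown here; this assumes QUERY_ALL_CLASSES_OF_A_GRAPH uses named
# placeholders such as %(graph_uri)s, %(per_page)s and %(offset)s, which is
# why "offset" must exist in query_params during the expansion.
example_template = "SELECT ?class FROM <%(graph_uri)s> ... LIMIT %(per_page)s OFFSET %(offset)s"
example_query = example_template % {
    "graph_uri": "http://example.onto/",
    "per_page": "10",
    "offset": "0",
}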
def test_offset_calculation(self, mocked_settings):
    handler = MockHandler()
    params = ParamDict(handler, page=3, per_page=5)
    response = calculate_offset(params)
    expected = '15'
    self.assertEqual(expected, response)
def test_offset_defaults(self, mocked_settings):
    handler = MockHandler()
    params = ParamDict(handler)
    response = calculate_offset(params)
    expected = '20'
    self.assertEqual(expected, response)
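# A minimal sketch of the behavior the two tests above pin down, assuming
# calculate_offset multiplies page by per_page and returns the result as a
# string (page=3, per_page=5 -> '15'). The real implementation reads its
# defaults from settings, which the tests mock; those mocked defaults are not
# shown here, so the fallback values below are placeholders only.
def _example_calculate_offset(params, default_page="0", default_per_page="10"):
    page = int(params.get("page", default_page))
    per_page = int(params.get("per_page", default_per_page))
    return str(page * per_page)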
def offset(self):
    return calculate_offset(self.params)