def alert_query(alert):
    """Execute the alert's saved search and return matching documents."""
    query = authz_sources_filter(text_query(alert.query_text))
    if alert.entity_id:
        query = filter_query(query, [('entities.uuid', alert.entity_id)],
                             OR_FIELDS)
    if alert.notified_at:
        # Only consider documents created since the last notification.
        query = add_filter(query, {"range": {"created_at": {"gt": alert.notified_at}}})
    result, hits, output = execute_basic(TYPE_DOCUMENT,
                                         {'query': query, 'size': 150})
    record_queries = []
    source_cache = {}
    for hit in hits.get('hits', []):
        document = hit.get('_source')
        document['id'] = int(hit.get('_id'))
        sid = document['source_id']
        # Cache Source lookups so each source is fetched at most once.
        if sid not in source_cache:
            source_cache[sid] = Source.by_id(sid)
        source = source_cache[sid]
        if source is None:
            # Unknown source: drop the document from the results entirely.
            continue
        document['source'] = source
        document['records'] = {'results': [], 'total': 0}
        record_query = records_query(document['id'], alert.to_query(), size=1)
        if record_query is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            record_queries.append(json.dumps({}))
            record_queries.append(json.dumps(record_query))
        output['results'].append(document)
    run_sub_queries(output, record_queries)
    return output
def execute_documents_query(args, q):
    """Execute a document search and return one page of results.

    Runs *q* against the document index, builds a paginated envelope
    (offset/limit/total/next link), converts facet aggregations, and then
    issues a single msearch for per-document record highlights.

    :param args: the request arguments (a werkzeug MultiDict), used for
                 facet conversion and to rebuild the ``next`` page URL.
    :param q: the fully-composed Elasticsearch query body; must contain
              ``from`` and ``size``.
    :return: a JSON-serializable dict with ``results`` and paging metadata.
    """
    result = es.search(index=es_index, doc_type=TYPE_DOCUMENT, body=q)
    hits = result.get('hits', {})
    output = {
        'status': 'ok',
        'results': [],
        'offset': q['from'],
        'limit': q['size'],
        'total': hits.get('total'),
        'next': None,
        'facets': {},
        'watchlists': {}
    }
    convert_aggregations(result, output, args)
    next_offset = output['offset'] + output['limit']
    if output['total'] > next_offset:
        # Rebuild the query string for the next page, replacing only the
        # offset and keeping every other argument as-is.
        params = {'offset': next_offset}
        for k, v in args.iterlists():
            if k == 'offset':  # was `k in ['offset']`: equality, not membership
                continue
            params[k] = v
        output['next'] = url_for('search.query', **params)
    sub_queries = []
    for doc in hits.get('hits', []):
        document = doc.get('_source')
        document['id'] = int(doc.get('_id'))
        document['score'] = doc.get('_score')
        document['records'] = {'results': [], 'total': 0}
        sq = records_query(document['id'], args)
        if sq is not None:
            # msearch consumes (header, body) line pairs per sub-query.
            sub_queries.append(json.dumps({}))
            sub_queries.append(json.dumps(sq))
        document['api_url'] = url_for('document.view',
                                      document_id=doc.get('_id'))
        document['data_url'] = url_for('document.file',
                                       document_id=doc.get('_id'))
        output['results'].append(document)
    if len(sub_queries):
        res = es.msearch(index=es_index, doc_type=TYPE_RECORD,
                         body='\n'.join(sub_queries))
        # Attach record hits to the document they belong to.
        for doc in output['results']:
            for sq in res.get('responses', []):
                sqhits = sq.get('hits', {})
                # BUG FIX: the default must be a list, not a dict — iterating
                # a dict would yield keys and `hit.get(...)` would then fail.
                for hit in sqhits.get('hits', []):
                    record = hit.get('_source')
                    if doc['id'] != record.get('document_id'):
                        continue
                    record['score'] = hit.get('_score')
                    record['text'] = hit.get('highlight', {}).get('text')
                    doc['records']['results'].append(record)
                    doc['records']['total'] = sqhits.get('total', 0)
    return output
def alert_query(alert):
    """Run the alert's stored query and return any matching documents."""
    query = authz_filter(text_query(alert.query_text))
    if alert.entity_id:
        query = filter_query(query, [('entities.id', alert.entity_id)],
                             OR_FIELDS)
    if alert.notified_at:
        # Restrict to documents created after the previous notification.
        query = add_filter(query, {
            "range": {
                "created_at": {
                    "gt": alert.notified_at
                }
            }
        })
    result, hits, output = execute_basic(TYPE_DOCUMENT,
                                         {'query': query, 'size': 150})
    record_queries = []
    collection_cache = {}
    for hit in hits.get('hits', []):
        document = hit.get('_source')
        document['id'] = int(hit.get('_id'))
        document['collections'] = []
        for collection_id in document['collection_id']:
            # Skip collections the current user is not allowed to read.
            if collection_id not in authz.collections(authz.READ):
                continue
            if collection_id not in collection_cache:
                collection_cache[collection_id] = Collection.by_id(collection_id)
            collection = collection_cache[collection_id]
            if collection is None:
                continue
            document['collections'].append(collection)
        document['records'] = {'results': [], 'total': 0}
        record_query = records_query(document['id'], alert.to_query(), size=1)
        if record_query is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            record_queries.append(json.dumps({}))
            record_queries.append(json.dumps(record_query))
        output['results'].append(document)
    run_sub_queries(output, record_queries)
    return output
def alert_query(alert):
    """Execute the query and return a set of results."""
    cache = {}

    def _source_for(source_id):
        # Memoize Source lookups across documents in this result set.
        if source_id not in cache:
            cache[source_id] = Source.by_id(source_id)
        return cache[source_id]

    query = text_query(alert.query_text)
    query = authz_sources_filter(query)
    if alert.entity_id:
        query = filter_query(query, [('entities.uuid', alert.entity_id)],
                             OR_FIELDS)
    if alert.notified_at:
        # Limit results to documents created after the last notification.
        query = add_filter(query, {
            "range": {
                "created_at": {
                    "gt": alert.notified_at
                }
            }
        })
    result, hits, output = execute_basic(TYPE_DOCUMENT,
                                         {'query': query, 'size': 150})
    sub_queries = []
    for hit in hits.get('hits', []):
        document = hit.get('_source')
        document['id'] = int(hit.get('_id'))
        source = _source_for(document['source_id'])
        if source is None:
            # Source could not be resolved; omit the document.
            continue
        document['source'] = source
        document['records'] = {'results': [], 'total': 0}
        record_q = records_query(document['id'], alert.to_query(), size=1)
        if record_q is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            sub_queries.append(json.dumps({}))
            sub_queries.append(json.dumps(record_q))
        output['results'].append(document)
    run_sub_queries(output, sub_queries)
    return output
def execute_documents_query(args, query):
    """Run a document search and attach per-document record matches."""
    result, hits, output = execute_basic(TYPE_DOCUMENT, query)
    convert_document_aggregations(result, output, args)
    record_queries = []
    for hit in hits.get('hits', []):
        document = hit.get('_source')
        doc_id = hit.get('_id')
        document['id'] = int(doc_id)
        document['score'] = hit.get('_score')
        document['records'] = {'results': [], 'total': 0}
        record_query = records_query(document['id'], args)
        if record_query is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            record_queries.append(json.dumps({}))
            record_queries.append(json.dumps(record_query))
        document['api_url'] = url_for('documents_api.view',
                                      document_id=doc_id)
        document['data_url'] = url_for('documents_api.file',
                                       document_id=doc_id)
        output['results'].append(document)
    run_sub_queries(output, record_queries)
    return output
def execute_documents_alert_query(args, query):
    """Execute an alert search, fixing the page size at 50 documents."""
    if not isinstance(args, MultiDict):
        args = MultiDict(args)
    query['size'] = 50
    result, hits, output = execute_basic(TYPE_DOCUMENT, query)
    convert_aggregations(result, output, args)
    record_queries = []
    for hit in hits.get('hits', []):
        document = hit.get('_source')
        document['id'] = int(hit.get('_id'))
        # Resolve the document's source from the facet values. Note there is
        # deliberately no break: a later entry with the same id would win,
        # matching the original behavior.
        for facet_source in output['sources']['values']:
            if facet_source['id'] == document['source_id']:
                document['source'] = facet_source
        document['records'] = {'results': [], 'total': 0}
        record_query = records_query(document['id'], args, size=1)
        if record_query is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            record_queries.append(json.dumps({}))
            record_queries.append(json.dumps(record_query))
        output['results'].append(document)
    run_sub_queries(output, record_queries)
    return output
def execute_documents_query(args, query):
    """Execute the query and return a set of results."""
    result, hits, output = execute_basic(TYPE_DOCUMENT, query)
    convert_aggregations(result, output, args)
    sub_queries = []
    for raw_hit in hits.get('hits', []):
        document = raw_hit.get('_source')
        raw_id = raw_hit.get('_id')
        document['id'] = int(raw_id)
        document['score'] = raw_hit.get('_score')
        document['records'] = {'results': [], 'total': 0}
        sq = records_query(document['id'], args)
        if sq is not None:
            # Header/body line pair, as consumed by run_sub_queries.
            sub_queries.append(json.dumps({}))
            sub_queries.append(json.dumps(sq))
        document['api_url'] = url_for('documents_api.view', document_id=raw_id)
        document['data_url'] = url_for('documents_api.file', document_id=raw_id)
        output['results'].append(document)
    run_sub_queries(output, sub_queries)
    return output