Example #1
import sys

from flask import current_app

# Module paths as used in Timesketch; adjust if your layout differs.
from timesketch.lib.datastores.elastic import ElasticsearchDataStore
from timesketch.models.sketch import Sketch


def event_stream(sketch_id, query):
    """Yield every event in a sketch that matches a query string."""
    es = ElasticsearchDataStore(host=current_app.config[u'ELASTIC_HOST'],
                                port=current_app.config[u'ELASTIC_PORT'])
    sketch = Sketch.query.get(sketch_id)
    if not sketch:
        sys.exit('No such sketch')
    # Restrict the search to the indices backing this sketch's timelines.
    indices = {t.searchindex.index_name for t in sketch.timelines}

    result = es.search(sketch_id=sketch_id,
                       query_string=query,
                       query_filter={u'limit': 10000},
                       query_dsl={},
                       indices=list(indices),
                       return_fields=[u'xml_string', u'timestamp'],
                       enable_scroll=True)

    scroll_id = result[u'_scroll_id']
    scroll_size = result[u'hits'][u'total']

    # First page of results.
    for event in result[u'hits'][u'hits']:
        yield event

    # Keep pulling pages until the scroll comes back empty.
    while scroll_size > 0:
        result = es.client.scroll(scroll_id=scroll_id, scroll=u'1m')
        scroll_id = result[u'_scroll_id']
        scroll_size = len(result[u'hits'][u'hits'])
        for event in result[u'hits'][u'hits']:
            yield event
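
Because event_stream is a generator, callers can consume arbitrarily large result sets without holding them all in memory. A minimal consumption sketch (the sketch id and query string are made up for illustration, and the call must run inside a Flask application context since event_stream touches current_app):

import json

# Hypothetical usage: print each matching event as one JSON line.
for event in event_stream(sketch_id=1, query=u'event_identifier:4624'):
    print(json.dumps(event[u'_source']))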
Example #2
def event_stream(sketch_id, query):
    """Yield every event in a sketch that matches a query string."""
    # Imports as in Example #1.
    es = ElasticsearchDataStore(
        host=current_app.config['ELASTIC_HOST'],
        port=current_app.config['ELASTIC_PORT'])
    sketch = Sketch.query.get(sketch_id)
    if not sketch:
        sys.exit('No such sketch')
    # Restrict the search to the indices backing this sketch's timelines.
    indices = {t.searchindex.index_name for t in sketch.timelines}

    result = es.search(
        sketch_id=sketch_id,
        query_string=query,
        # 'size' caps each page of results; 'terminate_after' bounds how
        # many documents each shard examines.
        query_filter={'size': 10000, 'terminate_after': 1000},
        query_dsl={},
        indices=list(indices),
        return_fields=['xml_string', 'timestamp'],
        enable_scroll=True)

    scroll_id = result['_scroll_id']
    # Pre-7 Elasticsearch returns 'total' as a plain integer.
    scroll_size = result['hits']['total']

    # First page of results.
    for event in result['hits']['hits']:
        yield event

    # Keep pulling pages until the scroll comes back empty.
    while scroll_size > 0:
        result = es.client.scroll(scroll_id=scroll_id, scroll='1m')
        scroll_id = result['_scroll_id']
        scroll_size = len(result['hits']['hits'])
        for event in result['hits']['hits']:
            yield event
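
Compared with Example #1, only the query filter changes. The scroll loop itself maps onto the plain elasticsearch-py client (pre-8 API; the host, index name and query below are illustrative assumptions) roughly like this:

from elasticsearch import Elasticsearch

client = Elasticsearch([{'host': 'localhost', 'port': 9200}])

# The initial search opens a scroll context and returns the first page.
page = client.search(
    index='my-index',
    scroll='1m',
    body={'query': {'match_all': {}}, 'size': 1000})
scroll_id = page['_scroll_id']

# Fetch subsequent pages until one comes back empty.
while page['hits']['hits']:
    for hit in page['hits']['hits']:
        print(hit['_source'])
    page = client.scroll(scroll_id=scroll_id, scroll='1m')
    scroll_id = page['_scroll_id']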
Example #3
# Python 2 only: this variant relies on basestring, dict.iteritems()
# and the byte-oriented csv module.
import csv
import json

from StringIO import StringIO

from flask import current_app
from flask_login import current_user

# Module paths as used in Timesketch; adjust if your layout differs.
from timesketch.lib.datastores.elastic import ElasticsearchDataStore
from timesketch.models.sketch import Sketch


def export(sketch_id):
    """Generates CSV from search result.

    Args:
        sketch_id: Primary key for a sketch.
    Returns:
        CSV string with header.
    """
    sketch = Sketch.query.get_with_acl(sketch_id)
    view = sketch.get_user_view(current_user)
    query_filter = json.loads(view.query_filter)
    query_dsl = json.loads(view.query_dsl)
    indices = query_filter.get(u'indices', [])

    datastore = ElasticsearchDataStore(
        host=current_app.config[u'ELASTIC_HOST'],
        port=current_app.config[u'ELASTIC_PORT'])

    result = datastore.search(
        sketch_id, view.query_string, query_filter, query_dsl, indices,
        aggregations=None, return_results=True)

    csv_out = StringIO()
    # extrasaction='ignore' drops fields outside this whitelist; without
    # it DictWriter raises on rows that carry unexpected keys.
    csv_writer = csv.DictWriter(
        csv_out, fieldnames=[
            u'timestamp', u'message', u'timestamp_desc', u'datetime',
            u'timesketch_label', u'tag'],
        extrasaction=u'ignore')
    csv_writer.writeheader()
    # The Python 2 csv module expects byte strings, so encode text
    # values to UTF-8 before writing.
    for _event in result[u'hits'][u'hits']:
        csv_writer.writerow(
            dict((k, v.encode(u'utf-8') if isinstance(v, basestring) else v)
                 for k, v in _event[u'_source'].iteritems()))

    return csv_out.getvalue()
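
A CSV string produced this way is typically handed back to the browser as a file download. A minimal sketch, assuming an existing Flask app object and an illustrative route path:

from flask import Response

@app.route('/sketch/<int:sketch_id>/export')
def export_view(sketch_id):
    csv_string = export(sketch_id)
    return Response(
        csv_string,
        mimetype='text/csv',
        headers={
            'Content-Disposition':
                'attachment; filename=sketch_%d.csv' % sketch_id})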
Example #4
# Imports as in Example #3, plus codecs and six; on Python 3 use
# io.StringIO in place of StringIO.StringIO.
def export(sketch_id):
    """Generates CSV from search result.

    Args:
        sketch_id: Primary key for a sketch.
    Returns:
        CSV string with header.
    """
    sketch = Sketch.query.get_with_acl(sketch_id)
    view = sketch.get_user_view(current_user)
    query_filter = json.loads(view.query_filter)
    query_dsl = json.loads(view.query_dsl)
    indices = query_filter.get('indices', [])

    # Export more than the default first 500 results.
    max_events_to_fetch = 10000
    query_filter['terminate_after'] = max_events_to_fetch
    query_filter['size'] = max_events_to_fetch

    datastore = ElasticsearchDataStore(
        host=current_app.config['ELASTIC_HOST'],
        port=current_app.config['ELASTIC_PORT'])

    result = datastore.search(
        sketch_id,
        view.query_string,
        query_filter,
        query_dsl,
        indices,
        aggregations=None)

    # Build the CSV header from the union of all fields seen in the
    # results, keeping the default fields first.
    all_fields = set()
    for event in result['hits']['hits']:
        all_fields.update(event['_source'].keys())

    all_fields.difference_update(DEFAULT_FIELDS)
    fieldnames = DEFAULT_FIELDS + sorted(all_fields)

    csv_out = StringIO()
    csv_writer = csv.DictWriter(csv_out, fieldnames=fieldnames)
    csv_writer.writeheader()
    # Decode any byte strings so the csv module always sees text.
    for _event in result['hits']['hits']:
        sources = _event['_source']
        row = {}
        for key, value in sources.items():
            if isinstance(value, six.binary_type):
                value = codecs.decode(value, 'utf-8')
            row[key] = value
        # '_index' is assumed to be part of DEFAULT_FIELDS so that it
        # appears in the header; decode it like any other value.
        row['_index'] = _event['_index']
        if isinstance(row['_index'], six.binary_type):
            row['_index'] = codecs.decode(row['_index'], 'utf-8')
        csv_writer.writerow(row)

    return csv_out.getvalue()
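
The union-of-keys step above is what lets the header cover every field that appears in any hit, not just a fixed whitelist. The same pattern in isolation, using csv.DictWriter's restval to pad rows that lack a column (self-contained, standard library only):

import csv
import io

rows = [{'timestamp': 1, 'message': 'a'},
        {'timestamp': 2, 'tag': 'x'}]

# Union of all keys across rows, so no column is silently dropped.
fieldnames = sorted({key for row in rows for key in row})

out = io.StringIO()
writer = csv.DictWriter(out, fieldnames=fieldnames, restval='')
writer.writeheader()
writer.writerows(rows)
print(out.getvalue())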
Example #5
# Python 2 only: this variant uses basestring and dict.iteritems() and
# encodes values to UTF-8 bytes for the byte-oriented csv module.
# Imports as in Example #3.
def export(sketch_id):
    """Generates CSV from search result.

    Args:
        sketch_id: Primary key for a sketch.
    Returns:
        CSV string with header.
    """
    sketch = Sketch.query.get_with_acl(sketch_id)
    view = sketch.get_user_view(current_user)
    query_filter = json.loads(view.query_filter)
    query_dsl = json.loads(view.query_dsl)
    indices = query_filter.get(u'indices', [])

    # Export more than the default first 500 results.
    max_events_to_fetch = 10000
    query_filter[u'limit'] = max_events_to_fetch

    datastore = ElasticsearchDataStore(
        host=current_app.config[u'ELASTIC_HOST'],
        port=current_app.config[u'ELASTIC_PORT'])

    result = datastore.search(
        sketch_id,
        view.query_string,
        query_filter,
        query_dsl,
        indices,
        aggregations=None)

    # Build the CSV header from the union of all fields seen in the
    # results, keeping the default fields first.
    all_fields = set()
    for event in result[u'hits'][u'hits']:
        all_fields.update(event[u'_source'].keys())

    all_fields.difference_update(DEFAULT_FIELDS)
    fieldnames = DEFAULT_FIELDS + sorted(all_fields)

    csv_out = StringIO()
    csv_writer = csv.DictWriter(csv_out, fieldnames=fieldnames)
    csv_writer.writeheader()
    # The Python 2 csv module expects byte strings, so encode text
    # values to UTF-8 before writing.
    for _event in result[u'hits'][u'hits']:
        row = dict((k, v.encode(u'utf-8') if isinstance(v, basestring) else v)
                   for k, v in _event[u'_source'].iteritems())
        # '_index' is assumed to be part of DEFAULT_FIELDS so that it
        # appears in the header.
        row[u'_index'] = _event[u'_index']
        if isinstance(row[u'_index'], basestring):
            row[u'_index'] = row[u'_index'].encode(u'utf-8')
        csv_writer.writerow(row)

    return csv_out.getvalue()
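
This Python 2 variant encodes text values to UTF-8 bytes because Python 2's csv module writes byte strings, whereas Python 3's wants text. A version-agnostic normalizer can lean on six (ensure_text is available from six 1.12; the helper name here is made up):

import six

def to_csv_value(value):
    # Bytes are decoded to text, text passes through unchanged, and
    # non-string values (ints, lists) are returned as-is.
    if isinstance(value, (six.binary_type, six.text_type)):
        return six.ensure_text(value, encoding='utf-8')
    return value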