Code example #1
File: flowd_aggregate.py Project: open-sense/core
def aggregate_flowd():
    """ aggregate collected flowd data
    :return: None
    """
    # init metadata (progress maintenance)
    metadata = AggMetadata()

    # register aggregate classes to stream data to
    stream_agg_objects = list()
    for agg_class in lib.aggregates.get_aggregators():
        for resolution in agg_class.resolutions():
            stream_agg_objects.append(agg_class(resolution))

    # parse flow data and stream to registered consumers
    prev_recv = metadata.last_sync()
    for flow_record in parse_flow(prev_recv):
        if flow_record is None or prev_recv != flow_record['recv']:
            # commit data on receive timestamp change or last record
            for stream_agg_object in stream_agg_objects:
                stream_agg_object.commit()
            metadata.update_sync_time(prev_recv)
        if flow_record is not None:
            # send to aggregator
            for stream_agg_object in stream_agg_objects:
                stream_agg_object.add(flow_record)
            prev_recv = flow_record['recv']

    # expire old data
    for stream_agg_object in stream_agg_objects:
        stream_agg_object.cleanup()
        del stream_agg_object
    del metadata
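
The excerpt shows a single aggregation pass; how that pass gets scheduled is outside the snippet. A minimal driver sketch, assuming a simple periodic poll (the entry point and interval below are illustrative, not taken from the source):

import time

if __name__ == '__main__':
    # hypothetical scheduler: re-run the pass so newly collected flowd data is picked up
    while True:
        aggregate_flowd()
        time.sleep(30)  # assumed poll interval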
Code example #2
File: flowd_aggregate.py Project: enriqRosa/opnsense
def aggregate_flowd(config, do_vacuum=False):
    """ aggregate collected flowd data
    :param config: script configuration
    :param do_vacuum: vacuum database after cleanup
    :return: None
    """
    # init metadata (progress maintenance)
    metadata = AggMetadata(config.database_dir)

    # register aggregate classes to stream data to
    stream_agg_objects = list()
    for agg_class in lib.aggregates.get_aggregators():
        for resolution in agg_class.resolutions():
            stream_agg_objects.append(
                agg_class(resolution, config.database_dir))

    # parse flow data and stream to registered consumers
    prev_recv = metadata.last_sync()
    commit_record_count = 0
    for flow_record in parse_flow(prev_recv, config.flowd_source):
        if flow_record is None or (prev_recv != flow_record['recv']
                                   and commit_record_count > 100000):
            # commit data on receive timestamp change or last record
            for stream_agg_object in stream_agg_objects:
                stream_agg_object.commit()
            commit_record_count = 0
            metadata.update_sync_time(prev_recv)
        if flow_record is not None:
            # send to aggregator
            for stream_agg_object in stream_agg_objects:
                # add() may modify the flow record while processing, so it's
                # safer to pass an isolated copy here.
                stream_agg_object.add(copy.copy(flow_record))
            commit_record_count += 1
            prev_recv = flow_record['recv']

    # expire old data
    for stream_agg_object in stream_agg_objects:
        stream_agg_object.cleanup(do_vacuum)
        del stream_agg_object
    del metadata
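
All three variants program against the same small aggregator contract: a class-level resolutions() listing the sample intervals to materialize, plus per-instance add(), commit() and cleanup(). A minimal sketch of that inferred interface (the class name, attributes, and interval values are illustrative; only the four methods are implied by the code above):

class IllustrativeAggregator:
    """ stand-in showing the contract the aggregation loops rely on """

    @classmethod
    def resolutions(cls):
        # sample intervals in seconds to aggregate into (values illustrative)
        return [60, 60 * 5]

    def __init__(self, resolution, database_dir=None):
        self.resolution = resolution
        self._pending = list()

    def add(self, flow_record):
        # bucket the record by receive timestamp for this resolution
        self._pending.append((flow_record['recv'] // self.resolution, flow_record))

    def commit(self):
        # flush buffered rows to storage; placeholder only
        self._pending = list()

    def cleanup(self, do_vacuum=False):
        # expire buckets older than the retention window; placeholder only
        pass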
Code example #3
File: flowd_aggregate.py Project: zvs44/core
def aggregate_flowd(do_vacuum=False):
    """ aggregate collected flowd data
    :param do_vacuum: vacuum database after cleanup
    :return: None
    """
    # init metadata (progress maintenance)
    metadata = AggMetadata()

    # register aggregate classes to stream data to
    stream_agg_objects = list()
    for agg_class in lib.aggregates.get_aggregators():
        for resolution in agg_class.resolutions():
            stream_agg_objects.append(agg_class(resolution))

    # parse flow data and stream to registered consumers
    prev_recv = metadata.last_sync()
    commit_record_count = 0
    for flow_record in parse_flow(prev_recv):
        if flow_record is None or (prev_recv != flow_record['recv'] and commit_record_count > 100000):
            # commit data on receive timestamp change or last record
            for stream_agg_object in stream_agg_objects:
                stream_agg_object.commit()
            commit_record_count = 0
            metadata.update_sync_time(prev_recv)
        if flow_record is not None:
            # send to aggregator
            for stream_agg_object in stream_agg_objects:
                # add() may modify the flow record while processing, so it's
                # safer to pass an isolated copy here.
                flow_record_cpy = copy.copy(flow_record)
                stream_agg_object.add(flow_record_cpy)
            commit_record_count += 1
            prev_recv = flow_record['recv']

    # expire old data
    for stream_agg_object in stream_agg_objects:
        stream_agg_object.cleanup(do_vacuum)
        del stream_agg_object
    del metadata
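
All variants also rely on the same parse_flow() contract: a generator that yields flow records as dicts carrying at least a 'recv' receive timestamp, restricted to records newer than the last sync point, and that yields a trailing None so the caller commits its final batch. A stub sketching that inferred shape (field names beyond 'recv' and the sample values are purely illustrative; the extra flowd_source parameter seen in example #2 is omitted):

def parse_flow_stub(since_recv):
    """ illustrative stand-in for parse_flow() """
    sample = [
        {'recv': 1000, 'src_addr': '192.0.2.1', 'octets': 128},
        {'recv': 1060, 'src_addr': '192.0.2.2', 'octets': 512},
    ]
    for record in sample:
        if record['recv'] > since_recv:
            yield record
    yield None  # signals end-of-stream so the last batch is committed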
Code example #4
""" fetch flowd aggregate metadata
"""
import sys
import ujson
import datetime
from lib.aggregate import AggMetadata
import lib.aggregates

result = dict()

# load global metadata
metadata = AggMetadata()
result['last_sync'] = metadata.last_sync()
# fetch aggregators
result['aggregators'] = dict()
for agg_class in lib.aggregates.get_aggregators():
    result['aggregators'][agg_class.__name__] = {'resolutions': list()}
    for resolution in agg_class.resolutions():
        result['aggregators'][agg_class.__name__]['resolutions'].append(
            resolution)

# output result
if len(sys.argv) > 1 and 'json' in sys.argv:
    # json format
    print(ujson.dumps(result))
else:
    # plain text format (body reconstructed; the original is truncated here)
    print('last sync: %s' % datetime.datetime.fromtimestamp(result['last_sync']))
    for agg_name in result['aggregators']:
        print('%s resolutions: %s' % (agg_name, result['aggregators'][agg_name]['resolutions']))
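
This variant reads the output format straight from sys.argv: passing json anywhere on the command line selects ujson output, anything else falls through to the plain text branch. An illustrative invocation (the script's filename is not shown in the excerpt, so the name here is an assumption):

    python fetch_flowd_metadata.py json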
Code example #5
import argparse
import datetime
import ujson
from lib import load_config
from lib.aggregate import AggMetadata
import lib.aggregates

result = dict()

parser = argparse.ArgumentParser()
parser.add_argument('--config',
                    help='configuration yaml',
                    default=None)
parser.add_argument('format', help='output format [text (default)|json]')
cmd_args = parser.parse_args()

configuration = load_config(cmd_args.config)

# load global metadata
metadata = AggMetadata(database_dir=configuration.database_dir)
result['last_sync'] = metadata.last_sync()
# fetch aggregators
result['aggregators'] = dict()
for agg_class in lib.aggregates.get_aggregators():
    result['aggregators'][agg_class.__name__] = {'resolutions': list()}
    for resolution in agg_class.resolutions():
        result['aggregators'][agg_class.__name__]['resolutions'].append(
            resolution)

# output result
if 'json' in cmd_args.format:
    # json format
    print(ujson.dumps(result))
else:
    # plain text format (body reconstructed; the original is truncated here)
    print('last sync: %s' % datetime.datetime.fromtimestamp(result['last_sync']))
    for agg_name in result['aggregators']:
        print('%s resolutions: %s' % (agg_name, result['aggregators'][agg_name]['resolutions']))
Code example #6
import argparse
import datetime
import ujson
from lib import load_config
from lib.aggregate import AggMetadata
import lib.aggregates

result = dict()

parser = argparse.ArgumentParser()
parser.add_argument('--config', help='configuration yaml', default=None)
parser.add_argument('format', help='output format [text (default)|json]')
cmd_args = parser.parse_args()

configuration = load_config(cmd_args.config)

# load global metadata
metadata = AggMetadata(database_dir=configuration.database_dir)
result['last_sync'] = metadata.last_sync()
# fetch aggregators
result['aggregators'] = dict()
for agg_class in lib.aggregates.get_aggregators():
    result['aggregators'][agg_class.__name__] = {'resolutions': list()}
    for resolution in agg_class.resolutions():
        result['aggregators'][agg_class.__name__]['resolutions'].append(resolution)

# output result
if 'json' in cmd_args.format:
    # json format
    print(ujson.dumps(result))
else:
    # plain text format (body reconstructed; the original is truncated here)
    print('last sync: %s' % datetime.datetime.fromtimestamp(result['last_sync']))
    for agg_name in result['aggregators']:
        print('%s resolutions: %s' % (agg_name, result['aggregators'][agg_name]['resolutions']))
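
The argparse variants (examples #5 and #6) make the same format choice through a positional argument instead, and add an optional --config pointing at a configuration yaml that is resolved via load_config(). An illustrative invocation (the script name and config path are assumptions, neither is shown in the excerpts):

    python fetch_flowd_metadata.py --config /usr/local/etc/netflow.yml json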