forked from sburnett/bismark-passive-server
-
Notifications
You must be signed in to change notification settings - Fork 0
/
harnesses.py
90 lines (82 loc) · 4 KB
/
harnesses.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
#!/usr/bin/env python
import errno
from optparse import OptionParser
from os import makedirs
from os.path import join
from process_sessions import process_sessions
from byte_count_processor import ByteCountProcessorCoordinator
from correlation_processor import CorrelationProcessorCoordinator
from domains_per_flow_processor import DomainsPerFlowProcessorCoordinator
from flow_statistics_processor import FlowStatisticsProcessorCoordinator
from ip_counts_processor import IpCountsProcessorCoordinator
from packet_size_processor import PacketSizeProcessorCoordinator
from update_statistics_processor import UpdateStatisticsProcessorCoordinator
# Add new processing harnesses here. Keep the names in alphabetical order.
# Coordinators are called in the given order once per update file.
# Registry of processing harnesses: maps a harness name (the first
# positional command line argument) to the ordered list of coordinator
# classes to run. Order matters — coordinators are invoked in the given
# order once per update file (see comment above and process_sessions).
harnesses = {
    # Full dashboard pipeline: correlation first, then per-flow domain
    # resolution, then byte-count aggregation.
    'dashboard': [CorrelationProcessorCoordinator,
                  DomainsPerFlowProcessorCoordinator,
                  ByteCountProcessorCoordinator],
    # Per-flow statistics; requires correlation and domain data upstream.
    'flow_statistics': [CorrelationProcessorCoordinator,
                        DomainsPerFlowProcessorCoordinator,
                        FlowStatisticsProcessorCoordinator],
    # IP address counting, built on correlated sessions.
    'ip_counts': [CorrelationProcessorCoordinator,
                  IpCountsProcessorCoordinator],
    # Packet size distributions, built on correlated sessions.
    'packet_size': [CorrelationProcessorCoordinator,
                    PacketSizeProcessorCoordinator],
    # Statistics about the update files themselves; needs no correlation.
    'updates': [UpdateStatisticsProcessorCoordinator],
}
def parse_coordinator_args(parser):
    """Register the database options shared by the processing coordinators.

    Add arguments for your custom coordinator here. Keep arguments in
    alphabetical order. Don't use short options in this function.
    """
    # Table of (long option, add_option keyword arguments), kept in
    # alphabetical order by option name.
    option_specs = (
        ('--db_filename', dict(action='store', dest='db_filename',
                               help='Sqlite database filename')),
        ('--db_name', dict(action='store', dest='db_name',
                           default='bismark_openwrt_live_v0_1',
                           help='Database name')),
        ('--db_rebuild', dict(action='store_true', dest='db_rebuild',
                              default=False,
                              help='Rebuild database from scratch (advanced)')),
        ('--db_user', dict(action='store', dest='db_user',
                           default='sburnett',
                           help='Database username')),
    )
    for option_name, option_kwargs in option_specs:
        parser.add_option(option_name, **option_kwargs)
def parse_args():
    """Parse command line options and the four required positional arguments.

    Returns a tuple (options, mandatory) where options is the optparse
    Values object and mandatory is a dict mapping the positional argument
    names ('harness', 'updates_directory', 'index_filename',
    'pickles_directory') to their values. Exits with a usage error when
    the argument count is wrong.

    Don't add coordinator-specific options to this function; register
    those in parse_coordinator_args instead.
    """
    usage = 'usage: %prog [options]' \
            ' harness updates_directory index_filename pickles_directory'
    parser = OptionParser(usage=usage)
    parser.add_option('-t', '--temp-pickles-dir', action='store',
                      dest='temp_pickles_dir', default='/dev/shm',
                      help='Directory for temporary runtime pickle storage')
    parser.add_option('-w', '--workers', type='int', action='store',
                      dest='workers', default=None,
                      help='Maximum number of worker threads to use')
    parser.add_option('-n', '--disable-refresh', action='store_true',
                      dest='disable_refresh', default=False,
                      help='Disable refresh of index before processing')
    parse_coordinator_args(parser)
    options, args = parser.parse_args()
    positional = ('harness', 'updates_directory', 'index_filename',
                  'pickles_directory')
    if len(args) != len(positional):
        # These are positional arguments, not options, and this branch
        # also fires when too many are supplied — say so precisely.
        parser.error('Expected exactly %d arguments: %s'
                     % (len(positional), ' '.join(positional)))
    mandatory = dict(zip(positional, args))
    return options, mandatory
def main():
    """Entry point: create the per-harness pickle directory, instantiate
    the harness's coordinators, and process all sessions."""
    (options, args) = parse_args()
    pickles_path = join(args['pickles_directory'], args['harness'])
    try:
        makedirs(pickles_path)
    except OSError as e:
        # The directory may already exist from a previous run — that is
        # fine; any other failure (e.g. permissions) is a real error.
        # ('except OSError as e' replaces the Python-2-only comma form,
        # which is a syntax error on Python 3.)
        if e.errno != errno.EEXIST:
            raise
    # One coordinator instance per class, in the order registered for
    # this harness (order matters — see the harnesses table).
    coordinators = [coordinator_class(options)
                    for coordinator_class in harnesses[args['harness']]]
    process_sessions(coordinators,
                     args['updates_directory'],
                     args['index_filename'],
                     pickles_path,
                     options.temp_pickles_dir,
                     options.workers,
                     not options.disable_refresh)
# Run the harness only when executed as a script, not when imported.
if __name__ == '__main__':
    main()