Example No. 1
    def __init__(self, outputqueue, config):
        multiprocessing.Process.__init__(self)
        # All the printing output should be sent to the outputqueue. The outputqueue is connected to another process called OutputProcess
        self.outputqueue = outputqueue
        # In case you need to read the slips.conf configuration file for your own configurations
        self.config = config
        # Start the DB
        __database__.start(self.config)
        # Set the output queue of our database instance
        __database__.setOutputQueue(self.outputqueue)
        # Open the maxminddb offline db
        try:
            self.reader = maxminddb.open_database(
                'modules/asn/GeoLite2-ASN.mmdb')
        except Exception:
            self.print(
                'Error opening the GeoLite2 ASN db in modules/asn/GeoLite2-ASN.mmdb. '
                'Please download the GeoLite2-ASN database from https://www.maxmind.com. '
                'Note that it must be in the MaxMind DB (.mmdb) format.'
            )
        # To which channels do you want to subscribe? When a message arrives on a channel, the module will wake up
        self.c1 = __database__.subscribe('new_ip')
        # Set the timeout based on the platform. The py-redis library does not officially support this:
        # timeout=None works only on macOS, and timeout=-1 works only on Linux
        if platform.system() == 'Darwin':
            # macOS
            self.timeout = None
        elif platform.system() == 'Linux':
            # Linux
            self.timeout = None
        else:
            # Unknown platform
            self.timeout = None
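A minimal sketch, not taken from the source, of how a module like Example No. 1 might consume the 'new_ip' channel in its run() loop and look each IP up in the GeoLite2 ASN database. PubSub.get_message(timeout=...) and maxminddb.Reader.get() are standard redis-py / maxminddb calls; the 'stop_process' message, the record keys, and the self.print() helper mirror the snippets on this page, but the loop itself is an assumption and presumes the Redis connection decodes responses to str.

    def run(self):
        # Hypothetical main loop: wake up whenever a message arrives on 'new_ip'
        while True:
            message = self.c1.get_message(timeout=self.timeout)
            if not message:
                continue
            if message['data'] == 'stop_process':
                # Assumed shutdown signal; close the mmdb reader and exit
                self.reader.close()
                return
            if message['channel'] == 'new_ip' and message['type'] == 'message':
                ip = message['data']
                # maxminddb.Reader.get() returns a dict for known networks, else None
                record = self.reader.get(ip)
                if record:
                    asn = record.get('autonomous_system_number')
                    org = record.get('autonomous_system_organization')
                    self.print(f'ASN for {ip}: {asn} ({org})')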
Example No. 2
    def __init__(self, outputqueue, config):
        multiprocessing.Process.__init__(self)
        # All the printing output should be sent to the outputqueue. The outputqueue is connected to another process called OutputProcess
        self.outputqueue = outputqueue
        # In case you need to read the slips.conf configuration file for your own configurations
        self.config = config
        # Start the DB
        __database__.start(self.config)
        # Subscribe to the channel
        self.c1 = __database__.subscribe('new_flow')
        self.fieldseparator = __database__.getFieldSeparator()
        # Set the output queue of our database instance
        __database__.setOutputQueue(self.outputqueue)
        # Read the configuration
        self.read_configuration()
        # To know when to retrain, we store the number of labels from the last time we retrained
        self.retrain = 0

        if platform.system() == 'Darwin':
            # macos
            self.timeout = None
        elif platform.system() == 'Linux':
            # linux
            self.timeout = None
        else:
            # Unknown platform
            self.timeout = None
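Example No. 2 calls self.read_configuration() right after subscribing. A minimal sketch of what such a helper might look like, assuming self.config is a configparser.ConfigParser loaded from slips.conf; the 'flowmldetection' section and 'mode' option names are illustrative assumptions, not taken from the source.

    def read_configuration(self):
        # Hypothetical helper: read this module's settings from slips.conf.
        # The section and option names below are illustrative assumptions.
        try:
            self.mode = self.config.get('flowmldetection', 'mode')
        except Exception:
            # Section or option missing in slips.conf: fall back to a default
            self.mode = 'train'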
Example No. 3
    def __init__(self, outputqueue, config):
        multiprocessing.Process.__init__(self)
        self.outputqueue = outputqueue
        self.config = config
        # Start the DB
        __database__.start(self.config)
        # Set the output queue of our database instance
        __database__.setOutputQueue(self.outputqueue)
        # Get from the database the separator used to separate the IP and the word profile
        self.fieldseparator = __database__.getFieldSeparator()
        # To which channels do you want to subscribe? When a message arrives on a channel, the module will wake up
        self.c1 = __database__.subscribe('tw_modified')
        # After a detection, if we receive another flow that does not change the count for that detection,
        # we should not re-detect only because the threshold was exceeded last time
        self.cache_det_thresholds = {}
        # Set the timeout based on the platform. The py-redis library does not officially support this:
        # timeout=None works only on macOS, and timeout=-1 works only on Linux
        if platform.system() == 'Darwin':
            # macOS
            self.timeout = None
        elif platform.system() == 'Linux':
            # Linux
            self.timeout = None
        else:
            # Unknown platform
            self.timeout = None
        self.separator = '_'
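A minimal sketch, under assumptions, of how the cache_det_thresholds dict from Example No. 3 could prevent re-reporting a detection whose count has not changed since the threshold was last exceeded; the helper name, key format, and default threshold are illustrative, not taken from the source.

    def should_detect(self, profileid, twid, count, threshold=10):
        # Hypothetical helper: detect only when the count reaches the threshold
        # AND has changed since the last detection for this profile/timewindow
        key = f'{profileid}{self.separator}{twid}'
        if count < threshold:
            return False
        if self.cache_det_thresholds.get(key) == count:
            # Same count as last time; this threshold crossing was already reported
            return False
        self.cache_det_thresholds[key] = count
        return True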
Example No. 4
def init_experiment(base_dir, exp_id, exp_suffix=""):
    config = get_default_config()
    output_process_queue = Queue()
    output_process_thread = OutputProcess(output_process_queue, 1, 1, config)
    output_process_thread.start()

    # Start the DB
    __database__.start(config)
    __database__.setOutputQueue(output_process_queue)

    exp_dir = base_dir + str(exp_id) + exp_suffix + "/"

    if not os.path.exists(exp_dir):
        os.mkdir(exp_dir)

    return config, output_process_queue, output_process_thread, exp_dir
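A short usage sketch for init_experiment(); the base directory, experiment id, and suffix are made-up values, and the shutdown at the end uses the generic multiprocessing.Process API rather than any project-specific stop message.

# Hypothetical caller: prepare an experiment directory and the output process
config, out_queue, out_thread, exp_dir = init_experiment(
    'output/experiments/', exp_id=1, exp_suffix='-baseline')
print(f'Writing experiment artifacts to {exp_dir}')
# ... run the experiment, saving results under exp_dir ...
# OutputProcess subclasses multiprocessing.Process, so terminate()/join() apply
out_thread.terminate()
out_thread.join()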
Example No. 5
    def __init__(self, inputqueue, outputqueue, verbose, debug, config):
        self.name = 'Logs'
        multiprocessing.Process.__init__(self)
        self.verbose = verbose
        self.debug = debug
        self.config = config
        # Start the DB
        __database__.start(self.config)
        self.separator = '_'
        # From the config, read the timeout used to read logs; it currently defaults to 5 seconds
        self.inputqueue = inputqueue
        self.outputqueue = outputqueue
        # Read the configuration
        self.read_configuration()
        self.fieldseparator = __database__.getFieldSeparator()
        # For some reason the database loses its outputqueue, so we have to re-set it here
        __database__.setOutputQueue(self.outputqueue)

        self.timeline_first_index = {}
        self.is_timline_file = False
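A minimal sketch of how a logs process like Example No. 5 might wait on its inputqueue using the timeout mentioned in the comments above; the run() body, the 'stop_process' string, and the outputqueue message format are assumptions, and self.timeout is presumed to be set by read_configuration().

    def run(self):
        import queue  # for the Empty exception raised by multiprocessing.Queue
        # Hypothetical main loop: block on the input queue for up to self.timeout seconds
        while True:
            try:
                line = self.inputqueue.get(timeout=self.timeout)
            except queue.Empty:
                # Nothing arrived within the timeout; refresh the log files and keep waiting
                continue
            if line == 'stop_process':
                # Assumed shutdown signal
                return
            # The output message format here is an assumption
            self.outputqueue.put(f'01|{self.name}|{line}')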