Example #1
# Standard-library imports this example relies on; DatabaseHandler, KBLogUtil,
# WIDSClient, Event, dateToMicro, DEV_DEBUG, and rule1 are assumed to come
# from the surrounding project.
import time
import traceback
from datetime import datetime
from multiprocessing import Process


class RuleEngine(Process):

    def __init__(self, config):
        Process.__init__(self)
        self.name = 'RuleEngine'
        self.config = config
        self.database = DatabaseHandler(self.config.name)
        self.logutil = KBLogUtil(self.config.name, self.name)
        self.wids = WIDSClient(self.config.server_ip, self.config.server_port)
        self.active = None
        self.rules = []

        # ///dev/// hard-coded rule for development; production rules would be
        # fetched from the WIDS server (see the commented-out block in run())
        self.rules.append(rule1)
        # ////////

    def run(self):
        self.logutil.log('Starting Execution')        
        self.active = True
        self.start_time = dateToMicro(datetime.utcnow())

        while self.active:
    
            if DEV_DEBUG:
                self.logutil.debug('Checking for new rules')
                time.sleep(3)

            # check server for new rules: 'GET /rules/updatecheck', if so load new rules
            '''
            if self.wids.checkNewRules():
                new_rules = self.wids.getNewRules()
            '''

            # evaluate each rule serially
            self.logutil.debug('Evaluating rules')
            for RuleObject in self.rules:
                self.evaluateRule(RuleObject)

        self.logutil.log('Terminating Execution')


    # TODO - replace the internal database with a REST call that queries the database for events
    def evaluateRule(self, RuleObject):
        try:
            self.logutil.dev('Evaluating Rule: {0} (EventIndex: {1})'.format(RuleObject.name, RuleObject.event_index))

            # every condition must be satisfied before any action fires
            for condition in RuleObject.conditions:
                (module, event, count) = condition

                # TODO - replace this direct database query with a REST call
                query = self.database.session.query(Event) \
                    .filter(Event.module == module) \
                    .filter(Event.name == event) \
                    .filter(Event.datetime > self.start_time) \
                    .filter(Event.id > RuleObject.event_index)

                results_count = query.limit(count).count()
                self.logutil.dev('Event: {0} - Found: {1} (Events Needed: {2})'.format(event, results_count, count))
                if results_count < count:
                    return False

                # advance the event index so matched events are not counted again
                last_result = query.order_by(Event.id.desc()).limit(count).first()
                RuleObject.event_index = last_result.id

            # all conditions met: execute the rule's actions once
            self.logutil.log('>>> Rule Conditions Met ({0})'.format(RuleObject.name))
            for action in RuleObject.actions:
                (actionType, actionParams) = action
                if actionType == 'GenerateAlert':
                    self.action_GenerateAlert(RuleObject.name, actionParams)
                elif actionType == 'GenerateLog':
                    self.action_GenerateLog(RuleObject.name, actionParams)
        except Exception:
            traceback.print_exc()
            
    def action_GenerateLog(self, rule_name, action_parameters):
        self.logutil.log('Executing GenerateLog Action for Rule {0}'.format(rule_name))
        # TODO - not yet implemented

    def action_GenerateAlert(self, rule_name, action_parameters):
        self.logutil.log('Executing GenerateAlert Action for Rule {0}'.format(rule_name))
        self.wids.generateAlert(rule_name)

    def shutdown(self):
        self.active = False
        self.terminate()
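
The rule objects referenced above (rule1) are not defined in this example. Below is a minimal sketch of the shape the engine appears to expect, with illustrative names and values; the Rule class and the config object are hypothetical, not part of the project shown above.

# Illustrative only: a simple container matching how evaluateRule() reads rules.
class Rule(object):
    def __init__(self, name, conditions, actions):
        self.name = name
        self.conditions = conditions  # list of (module, event, count) tuples
        self.actions = actions        # list of (actionType, actionParams) tuples
        self.event_index = 0          # id of the last event already counted

rule1 = Rule(name='BeaconFlood',
             conditions=[('BeaconMonitor', 'BeaconRequest', 20)],
             actions=[('GenerateAlert', None), ('GenerateLog', None)])

# RuleEngine subclasses multiprocessing.Process, so it runs in its own process:
# engine = RuleEngine(config)   # config supplies name, server_ip, server_port
# engine.start()
# ...
# engine.shutdown()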
Example #2
# Standard-library imports this example relies on; KBLogUtil and dateToMicro
# are assumed to come from the surrounding project.
import base64
import cPickle
import json
import traceback
import urllib2
from multiprocessing import Process
from struct import unpack


class FilterProcess(Process):
    def __init__(self, pipe, task_queue, stopevent, task_update_event, drone, parent):
        super(FilterProcess, self).__init__()
        self.pipe = pipe
        self.task_queue = task_queue
        self.stopevent = stopevent
        self.taskevent = task_update_event

        self.drone = drone
        self.parent = parent
        self.name = '{0}.Filter'.format(self.parent)
        self.logutil = KBLogUtil(self.drone, self.name, None)
        self.callbacks = 0

    def do_callback(self, uuid, cburl, pkt):
        pkt['uuid'] = uuid
        pkt['datetime'] = dateToMicro(pkt['datetime'])
        pkt['bytes'] = base64.b64encode(pkt['bytes'])
        if 0 in pkt: del pkt[0] # Kill KillerBee's backwards compatible keys
        if 1 in pkt: del pkt[1]
        if 2 in pkt: del pkt[2]
        http_headers = {'Content-Type': 'application/json', 'User-Agent': 'Drone'}
        post_data_json = json.dumps({'uuid': uuid, 'pkt': pkt})
        post_object = urllib2.Request(cburl, post_data_json, http_headers)
        try:
            urllib2.urlopen(post_object)
            self.logutil.debug('Successful Data Upload for task {0} to: {1}'.format(uuid, cburl))
        except IOError:
            self.logutil.debug('Failed Data Upload for task {0} to: {1}'.format(uuid, cburl))
        self.callbacks += 1

    def run(self):
        '''
        This part runs in a separate process, as invoked by multiprocessing.
        It receives packets from the SnifferProcess via a pipe and compares
        them against the currently tasked filters.
        '''
        self.logutil.log('Started')
        tasks = []
        while not self.stopevent.is_set():
        
            # check if there are new tasks and update
            while not self.task_queue.empty():
                self.logutil.debug('Detected Tasking Update in Queue')
                tasks = []
                pickleTaskDict = self.task_queue.get_nowait()
                for uuid,data in cPickle.loads(pickleTaskDict).items():
                    tasks.append( (uuid, data['filter'], data['callback']) )
                self.task_queue.task_done()
                self.logutil.log('Tasking Updated ({0} tasks total)'.format(len(tasks)))
           
            # get packet from sniffer and match against tasked filters 
            try:
                pkt = self.pipe.recv()
                self.logutil.debug('Received Packet: {0}'.format(pkt['bytes'].encode('hex')))
                self.logutil.dev('Filtering against {0} tasks ({1})'.format(len(tasks), [x[0] for x in tasks]))
                # Do the basic filtering, and run the callback function on packets that match
                for (uuid, filt, cb) in tasks:
                    # We check to see if the tasking has each test, and
                    # if it does, we see if it meets the defined condition.
                    # If it does not meet a condition, fail out right away.
                    if 'size' in filt:
                        (minB, maxB) = filt['size']
                        pktB = len(pkt['bytes'])
                        if pktB < minB or pktB > maxB:
                            continue
                    if 'fcf' in filt:
                        (mask, val) = filt['fcf']
                        if (unpack('>H', pkt['bytes'][0:2])[0] & mask) != val:
                            continue
                    if 'byteoffset' in filt:
                        (offset, mask, val) = filt['byteoffset']
                        if offset >= len(pkt['bytes']):
                            continue
                        if (unpack('B', pkt['bytes'][offset])[0] & mask) != val:
                            continue
                    # Two cases reach this point and trigger a callback:
                    # (a) no conditions, i.e. send all packets, and
                    # (b) a condition type we have no handler coded for.
                    self.logutil.debug('Matched Packet against task: {0}'.format(uuid))
                    self.do_callback(uuid, cb, pkt)
            except Exception as e:
                traceback.print_exc()
                print "Sniffer pipe on the filter end raised an exception (expected at shutdown):", e