def find_new_machines(self, machines):
    '''Parse switch structure to find new machines added to the network
    since the last call.

    On the first invocation every machine is recorded as KNOWN; on later
    invocations a machine is recorded as UNKNOWN when its hash has not
    been seen before, or re-recorded when its 'active' flag changed.

    Args:
        machines: iterable of machine records accepted by EndPoint();
                  each record's endpoint_data is expected to carry an
                  'active' key — TODO confirm against callers.
    '''
    changed = False
    if self.first_time:
        self.first_time = False
        # TODO db call to see if really need to run things
        for machine in machines:
            end_point = EndPoint(machine, state='KNOWN')
            self.logger.debug(
                'adding address to known systems {0}'.format(machine))
            self.endpoints.set(end_point)
            changed = True
    else:
        for machine in machines:
            end_point = EndPoint(machine, state='UNKNOWN')
            h = end_point.make_hash()
            # BUG FIX: removed the `machine_hashes` list that was built
            # here but never read (dead code).
            # Single lookup instead of `in` check followed by indexing.
            ep = self.endpoints.state.get(h)
            if ep is None:
                self.logger.debug(
                    '***** detected new address {0}'.format(machine))
                self.endpoints.set(end_point)
                changed = True
            elif end_point.endpoint_data[
                    'active'] != ep.endpoint_data['active']:
                # Same machine, but its active flag flipped — refresh it.
                self.endpoints.set(end_point)
                changed = True
    if changed:
        self.endpoints.print_endpoint_state()
def find_new_machines(self, machines):
    '''parse switch structure to find new machines added to network since last call

    First call: attempts to restore previously-seen endpoint data from
    Redis (restore is currently disabled — see commented-out lines),
    then records every machine as KNOWN. Later calls: records a machine
    when it is new (active == 1) or when its endpoint_data changed.
    '''
    changed = False
    if self.first_time:
        self.first_time = False
        # db call to see if really need to run things
        # TODO - still not right
        if self.r:
            # Best-effort restore from Redis; every level logs and
            # continues on failure rather than aborting the scan.
            try:
                mac_addresses = self.r.smembers('mac_addresses')
                for mac in mac_addresses:
                    try:
                        mac_info = self.r.hgetall(mac)
                        if 'poseidon_hash' in mac_info:
                            try:
                                poseidon_info = self.r.hgetall(mac_info['poseidon_hash'])
                                if 'endpoint_data' in poseidon_info:
                                    # Redis stores the dict as its repr; parse it back safely.
                                    endpoint_data = ast.literal_eval(poseidon_info['endpoint_data'])
                                    self.poseidon_logger.info(
                                        'adding address to known systems {0}'.format(endpoint_data))
                                    # endpoint_data seems to be incorrect
                                    #end_point = EndPoint(endpoint_data, state='KNOWN')
                                    #self.endpoints.set(end_point)
                            except Exception as e:  # pragma: no cover
                                self.logger.error('Unable to get endpoint data for {0} from Redis because {1}'.format(mac, str(e)))
                    except Exception as e:  # pragma: no cover
                        self.logger.error('Unable to get MAC information for {0} from Redis because {1}'.format(mac, str(e)))
            except Exception as e:  # pragma: no cover
                self.logger.error('Unable to get existing DB information from Redis because {0}'.format(str(e)))
        # Regardless of the Redis restore, record everything currently
        # visible on the switch as KNOWN.
        for machine in machines:
            end_point = EndPoint(machine, state='KNOWN')
            self.poseidon_logger.info(
                'adding address to known systems {0}'.format(machine))
            self.endpoints.set(end_point)
            changed = True
    else:
        for machine in machines:
            end_point = EndPoint(machine, state='UNKNOWN')
            h = end_point.make_hash()
            ep = None
            if h in self.endpoints.state:
                ep = self.endpoints.state[h]
            if ep is not None and ep.endpoint_data != end_point.endpoint_data:
                # Seen before, but some endpoint_data field changed.
                self.poseidon_logger.info(
                    'Device changed: {0}:{1}'.format(h, machine))
                self.endpoints.set(end_point)
                changed = True
            elif ep is None and end_point.endpoint_data['active'] == 1:
                # Brand new and currently active.
                self.poseidon_logger.info(
                    'Detected new device: {0}:{1}'.format(h, machine))
                self.endpoints.set(end_point)
                changed = True
    if changed:
        self.endpoints.print_endpoint_state()
def test_return_endpoint_state():
    '''return_endpoint_state should hand back the wrapper holding the
    endpoint previously stored via make_endpoint_dict.'''
    uss = Update_Switch_State()
    uss.first_time = False

    data = {
        'ip-address': '10.0.0.99',
        'mac': '20:4c:9e:5f:e3:c3',
        'segment': 'to-core-router',
        'tenant': 'EXTERNAL',
        'name': None
    }
    key = '3da53a95ae5d034ae37b539a24370260a36f8bb2'
    make_endpoint_dict(uss.endpoints.state, key, 'TEST_STATE', data)

    expected = EndPoint(
        {
            'ip-address': '10.0.0.99',
            'mac': '20:4c:9e:5f:e3:c3',
            'segment': 'to-core-router',
            'tenant': 'EXTERNAL',
            'name': None
        },
        prev_state='NONE',
        state='TEST_STATE',
        next_state='NONE')

    eps = uss.return_endpoint_state()
    actual = eps.state[key]
    assert actual.state == expected.state
    assert actual.next_state == expected.next_state
    for field in actual.endpoint_data:
        assert actual.endpoint_data[field] == expected.endpoint_data[field]
def start_vent_collector(self, dev_hash, num_captures=1):
    '''
    Given a device hash and optionally a number of captures
    to be taken, starts vent collector for that device with the
    options specified in poseidon.config.
    '''
    state = self.uss.return_endpoint_state()
    # Fall back to an empty EndPoint when the hash is unknown.
    endpoint = state.state.get(dev_hash, EndPoint(None))

    # Build the request piece by piece for readability.
    payload = {}
    payload['nic'] = self.mod_configuration['collector_nic']
    payload['id'] = dev_hash
    payload['interval'] = self.mod_configuration['collector_interval']
    payload['filter'] = '\'host {0}\''.format(
        self.uss.endpoints.get_endpoint_ip(dev_hash))
    payload['iters'] = str(num_captures)
    payload['metadata'] = endpoint.to_str()
    self.logger.debug('vent payload: ' + str(payload))

    vent_addr = self.mod_configuration['vent_ip'] + \
        ':' + self.mod_configuration['vent_port']
    uri = 'http://' + vent_addr + '/create'
    try:
        resp = requests.post(uri, data=json.dumps(payload))
        self.logger.debug('collector response: ' + resp.text)
    except Exception as e:  # pragma: no cover
        self.logger.debug('failed to start vent collector' + str(e))
def __init__(self):
    '''Pre-populate the endpoint wrapper with a fixed set of endpoints.'''
    self.endpoints = Endpoint_Wrapper()
    self.logger = None
    fixtures = {
        '4ee39d254db3e4a5264b75ce8ae312d69f9e73a3': EndPoint(
            {'ip-address': '10.00.0.101',
             'mac': 'f8:b1:56:fe:f2:de',
             'segment': 'prod',
             'tenant': 'FLOORPLATE',
             'name': None},
            prev_state='NONE', state='UNKNOWN', next_state='NONE'),
        'd60c5fa5c980b1cd791208eaf62aba9fb46d3aaa': EndPoint(
            {'ip-address': '10.0.0.99',
             'mac': '20:4c:9e:5f:e3:c3',
             'segment': 'to-core-router',
             'tenant': 'EXTERNAL',
             'name': None},
            prev_state='NONE', state='MIRRORING', next_state='NONE'),
        'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa1': EndPoint(
            {'ip-address': '10.0.0.99',
             'mac': '20:4c:9e:5f:e3:c3',
             'segment': 'to-core-router',
             'tenant': 'EXTERNAL',
             'name': None},
            prev_state='NONE', state='MIRRORING', next_state='NONE'),
        'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa2': EndPoint(
            {'ip-address': '10.0.0.99',
             'mac': '20:4c:9e:5f:e3:c3',
             'segment': 'to-core-router',
             'tenant': 'EXTERNAL',
             'name': None},
            prev_state='NONE', state='REINVESTIGATING', next_state='NONE'),
        'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa3': EndPoint(
            {'ip-address': '10.0.0.99',
             'mac': '20:4c:9e:5f:e3:c3',
             'segment': 'to-core-router',
             'tenant': 'EXTERNAL',
             'name': None},
            prev_state='NONE', state='REINVESTIGATING', next_state='NONE'),
    }
    # Copy the fixtures into the wrapper's state in one shot.
    self.endpoints.state.update(fixtures)
def test_mirror_endpoint():
    '''mirror_endpoint returns truthy for a known hash (and asks the SDN
    controller to mirror its IP) and falsy for an unknown hash.'''
    class Mockbcf():
        def __init__(self):
            pass

        def mirror_ip(self, ip, messages=None):
            # The only endpoint we mirror below has this address.
            assert ip == '10.0.0.99'

    uss = Update_Switch_State()
    uss.sdnc = Mockbcf()

    floorplate_ep = EndPoint(
        {
            'ip-address': '10.0.0.101',
            'mac': 'f8:b1:56:fe:f2:de',
            'segment': 'prod',
            'tenant': 'FLOORPLATE',
            'active': 1,
            'name': None
        },
        prev_state='NONE',
        state='UNKNOWN',
        next_state='NONE')
    external_ep = EndPoint(
        {
            'ip-address': '10.0.0.99',
            'mac': '20:4c:9e:5f:e3:c3',
            'segment': 'to-core-router',
            'tenant': 'EXTERNAL',
            'active': 1,
            'name': None
        },
        prev_state='NONE',
        state='KNOWN',
        next_state='NONE')
    uss.endpoints.state['d502caea3609d553ab16a00c554f0602c1419f58'] = floorplate_ep
    uss.endpoints.state['3da53a95ae5d034ae37b539a24370260a36f8bb2'] = external_ep

    assert uss.mirror_endpoint('3da53a95ae5d034ae37b539a24370260a36f8bb2')
    assert not uss.mirror_endpoint('NOT_A_HASH')
def start_vent_collector(self, dev_hash, num_captures=1):
    '''
    Given a device hash and optionally a number of captures to be taken,
    starts vent collector for that device with the options specified in
    poseidon.config.

    Args:
        dev_hash:     hash identifying the endpoint whose traffic to capture.
        num_captures: number of capture iterations vent should run (default 1).

    Only posts to vent when the controller connection check passes and,
    for a BcfProxy controller, when a span fabric is configured.
    '''
    endpoints = self.uss.return_endpoint_state()
    # Fall back to an empty EndPoint when the hash is unknown.
    endpoint = endpoints.state.get(dev_hash, EndPoint(None))
    # BUG FIX: removed dead `should_start = True` that was immediately
    # overwritten by the expression below.
    # BcfProxy controllers need a valid span fabric; other controller
    # types are always allowed to start.
    should_start = (isinstance(self.uss.sdnc, BcfProxy) and
                    self.uss.sdnc.get_span_fabric()) or (
                        not isinstance(self.uss.sdnc, BcfProxy))
    payload = {
        'nic': self.mod_configuration['collector_nic'],
        'id': dev_hash,
        'interval': self.mod_configuration['reinvestigation_frequency'],
        # Capture only frames to/from this endpoint's MAC address.
        'filter': '\'ether host {0}\''.format(
            self.uss.endpoints.get_endpoint_mac(dev_hash)),
        'iters': str(num_captures),
        'metadata': endpoint.to_str()
    }
    self.poseidon_logger.debug('vent payload: ' + str(payload))
    vent_addr = self.mod_configuration['vent_ip'] + \
        ':' + self.mod_configuration['vent_port']
    uri = 'http://' + vent_addr + '/create'
    connected = self.uss.sdnc.check_connection()
    if connected:
        try:
            if should_start:
                resp = requests.post(uri, data=json.dumps(payload))
                self.poseidon_logger.debug('collector response: ' + resp.text)
            else:
                self.poseidon_logger.debug(
                    'collector not started due to invalid span fabric configuration.'
                )
        except Exception as e:  # pragma: no cover
            self.poseidon_logger.debug('failed to start vent collector' + str(e))
    else:
        self.poseidon_logger.debug(
            'not starting vent collector because not connected a controller'
        )
def find_new_machines(self, machines):
    '''parse switch structure to find new machines added to network since last call'''
    if self.first_time:
        self.first_time = False
        # TODO db call to see if really need to run things
        for addr in machines:
            known_ep = EndPoint(addr, state='KNOWN')
            self.logger.debug(
                'adding address to known systems {0}'.format(addr))
            self.endpoints.set(known_ep)
        # print the state of things the first time
        self.endpoints.print_endpoint_state()
        return
    # Subsequent calls: only record addresses we have not seen before.
    for addr in machines:
        candidate = EndPoint(addr, state='UNKNOWN')
        key = candidate.make_hash()
        if key in self.endpoints.state:
            continue
        self.logger.debug(
            '***** detected new address {0}'.format(addr))
        self.endpoints.set(candidate)
def make_endpoint_dict(e_dict, my_hash, state, data):
    '''Create an EndPoint from data/state and store it in e_dict under my_hash.'''
    new_endpoint = EndPoint(data, state=state)
    e_dict[my_hash] = new_endpoint
def test_schedule_job_reinvestigation():
    '''Exercise schedule_job_reinvestigation with several endpoint mixes:
    a small wrapper (4 endpoints), a larger one (7 endpoints, including
    an unexpected state), an empty wrapper, and a single-endpoint
    wrapper — the scheduler should handle all without raising.'''
    # Minimal logger stand-in satisfying the .debug() calls.
    class MockLogger:
        def __init__(self):
            pass

        def debug(self, msg):
            pass

    epw = Endpoint_Wrapper()
    stuff = dict(
        {
            '4ee39d254db3e4a5264b75ce8ae312d69f9e73a3': EndPoint({
                'ip-address': '10.00.0.101',
                'mac': 'f8:b1:56:fe:f2:de',
                'segment': 'prod',
                'tenant': 'FLOORPLATE',
                'name': None},
                prev_state='NONE', state='REINVESTIGATING',
                next_state='UNKNOWN'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aaa': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa1': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='KNOWN',
                next_state='UNKNOWN'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa2': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING')
        })
    for s in stuff:
        epw.state[s] = stuff[s]
    assert len(epw.state) == 4
    # Scenario 1: four endpoints in a mix of states.
    poseidonMonitor.schedule_job_reinvestigation(4, epw, MockLogger())

    # Scenario 2: seven endpoints, including one in an unrecognized
    # state ('OTHER-STATE') — see the commented map below for a summary.
    epw = Endpoint_Wrapper()
    stuff = dict(
        {
            '4ee39d254db3e4a5264b75ce8ae312d69f9e73a3': EndPoint({
                'ip-address': '10.00.0.101',
                'mac': 'f8:b1:56:fe:f2:de',
                'segment': 'prod',
                'tenant': 'FLOORPLATE',
                'name': None},
                prev_state='NONE', state='REINVESTIGATING',
                next_state='UNKNOWN'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aaa': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa1': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='KNOWN',
                next_state='UNKNOWN'),
            'd60c5fa5c980b1cd791208eaf62aba9fb46d3aa2': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING'),
            'c60c5fa5c980b1cd791208eaf62aba9fb46d3aaa': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING'),
            'c60c5fa5c980b1cd791208eaf62aba9fb46d3aa1': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='UNKNOWN',
                next_state='REINVESTIGATING'),
            'c60c5fa5c980b1cd791208eaf62aba9fb46d3aa2': EndPoint({
                'ip-address': '10.0.0.99',
                'mac': '20:4c:9e:5f:e3:c3',
                'segment': 'to-core-router',
                'tenant': 'EXTERNAL',
                'name': None},
                prev_state='NONE', state='OTHER-STATE',
                next_state='UNKNOWN')
        })
    # end_points = {
    #     "hash_0": {"state": "REINVESTIGATING", "next-state": "UNKNOWN"},
    #     "hash_1": {"state": "UNKNOWN", "next-state": "REINVESTIGATING"},
    #     "hash_2": {"state": "KNOWN", "next-state": "UNKNOWN"},
    #     "hash_3": {"state": "UNKNOWN", "next-state": "REINVESTIGATING"},
    #     "hash_4": {"state": "UNKNOWN", "next-state": "REINVESTIGATING"},
    #     "hash_5": {"state": "UNKNOWN", "next-state": "REINVESTIGATING"},
    #     "hash_6": {"state": "OTHER-STATE", "next-state": "UNKNOWN"}
    # }
    for s in stuff:
        epw.state[s] = stuff[s]
    poseidonMonitor.schedule_job_reinvestigation(4, epw, MockLogger())

    # Scenario 3: an empty wrapper should also be handled cleanly.
    epw = Endpoint_Wrapper()
    #end_points = {}
    poseidonMonitor.schedule_job_reinvestigation(4, epw, MockLogger())

    # Scenario 4: a single endpoint reused from the fixture above.
    epw.state['4ee39d254db3e4a5264b75ce8ae312d69f9e73a3'] = stuff['4ee39d254db3e4a5264b75ce8ae312d69f9e73a3']
    #end_points = {"hash_0": {"MALFORMED": "YES"}}
    poseidonMonitor.schedule_job_reinvestigation(4, epw, MockLogger())