def test_region_name(self, mock_client):
    # Region specified for session
    FlowLogsReader('some_group', region_name='some-region')
    mock_client.assert_called_with('logs', region_name='some-region')

    # None specified
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs')
def test_profile_name(self, mock_session):
    # profile_name specified
    FlowLogsReader('some_group', profile_name='my-profile')
    mock_session.Session.assert_called_with(profile_name='my-profile')

    # No profile specified
    FlowLogsReader('some_group')
    mock_session.Session.assert_called_with()
def setUp(self, mock_boto3):
    self.mock_client = MagicMock()
    mock_boto3.client.return_value = self.mock_client

    self.start_time = datetime(2015, 8, 12, 12, 0, 0)
    self.end_time = datetime(2015, 8, 12, 13, 0, 0)

    self.inst = FlowLogsReader(
        'group_name',
        start_time=self.start_time,
        end_time=self.end_time,
    )
def setUp(self):
    self.mock_client = MagicMock()

    self.start_time = datetime(2015, 8, 12, 12, 0, 0)
    self.end_time = datetime(2015, 8, 12, 13, 0, 0)

    self.inst = FlowLogsReader(
        'group_name',
        start_time=self.start_time,
        end_time=self.end_time,
        filter_pattern='REJECT',
        boto_client=self.mock_client,
    )
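This setUp relies on constructor injection: FlowLogsReader accepts a ready-made CloudWatch Logs client via boto_client instead of building its own session. A minimal usage sketch of that pattern outside the test suite follows; the region and log group name are placeholders, not values from this code.

# Sketch only: shows the boto_client injection pattern exercised above.
import boto3
from flowlogs_reader import FlowLogsReader

logs_client = boto3.client('logs', region_name='us-east-1')  # placeholder region
reader = FlowLogsReader(
    'flowlog_group',  # placeholder log group name
    filter_pattern='REJECT',
    boto_client=logs_client,
)
for record in reader:
    print(record.srcaddr, record.dstaddr, record.action)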
def test_region(self, mock_client):
    # Region specified
    FlowLogsReader('some_group', region_name='some-region')
    mock_client.assert_called_with('logs', region_name='some-region')

    # No region specified - assume configuration file worked
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs')

    # No region specified and no configuration file
    def mock_response(*args, **kwargs):
        if 'region_name' not in kwargs:
            raise NoRegionError

    mock_client.side_effect = mock_response
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs', region_name=DEFAULT_REGION_NAME)
def test_region_name(self, mock_session):
    # Region specified for session
    FlowLogsReader('some_group', region_name='some-region')
    mock_session.Session.assert_called_with(region_name='some-region')

    # Region specified for client, not for session
    FlowLogsReader(
        'some_group', boto_client_kwargs={'region_name': 'my-region'}
    )
    mock_session.Session().client.assert_called_with(
        'logs', region_name='my-region'
    )

    # No region specified for session or client - use the default
    def mock_response(*args, **kwargs):
        if 'region_name' not in kwargs:
            raise NoRegionError

    mock_session.Session().client.side_effect = mock_response
    FlowLogsReader('some_group')
    mock_session.Session().client.assert_called_with(
        'logs', region_name=DEFAULT_REGION_NAME
    )
def test_get_fields(self, mock_client):
    cwl_client = MagicMock()
    ec2_client = mock_client.return_value
    ec2_client.describe_flow_logs.return_value = {
        'FlowLogs': [
            {'LogFormat': '${srcaddr} ${dstaddr} ${start} ${log-status}'}
        ]
    }
    reader = FlowLogsReader(
        'some_group',
        boto_client=cwl_client,
        fields=None,
    )
    self.assertEqual(
        reader.fields, ('srcaddr', 'dstaddr', 'start', 'log_status')
    )
    ec2_client.describe_flow_logs.assert_called_once_with(
        Filters=[{'Name': 'log-group-name', 'Values': ['some_group']}]
    )
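The test above pins down how fields=None is resolved: the reader asks EC2 for the flow log's LogFormat and turns the ${...} tokens into attribute names, with hyphens mapped to underscores. A hedged sketch of that transformation, using only the format string and expected tuple from the test (not the library's actual implementation):

log_format = '${srcaddr} ${dstaddr} ${start} ${log-status}'
fields = tuple(
    token[2:-1].replace('-', '_')  # strip '${' and '}', normalize hyphens
    for token in log_format.split()
)
assert fields == ('srcaddr', 'dstaddr', 'start', 'log_status')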
class FlowLogsReaderTestCase(TestCase):
    def setUp(self):
        self.mock_client = MagicMock()

        self.start_time = datetime(2015, 8, 12, 12, 0, 0)
        self.end_time = datetime(2015, 8, 12, 13, 0, 0)

        self.inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
            filter_pattern='REJECT',
            boto_client=self.mock_client,
        )

    def test_init(self):
        self.assertEqual(self.inst.log_group_name, 'group_name')
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.start_ms // 1000),
            self.start_time,
        )
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.end_ms // 1000),
            self.end_time,
        )
        self.assertEqual(self.inst.paginator_kwargs['filterPattern'], 'REJECT')

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_region_name(self, mock_session):
        # Region specified for session
        FlowLogsReader('some_group', region_name='some-region')
        mock_session.Session.assert_called_with(region_name='some-region')

        # Region specified for client, not for session
        FlowLogsReader(
            'some_group', boto_client_kwargs={'region_name': 'my-region'}
        )
        mock_session.Session().client.assert_called_with(
            'logs', region_name='my-region'
        )

        # No region specified for session or client - use the default
        def mock_response(*args, **kwargs):
            if 'region_name' not in kwargs:
                raise NoRegionError

        mock_session.Session().client.side_effect = mock_response
        FlowLogsReader('some_group')
        mock_session.Session().client.assert_called_with(
            'logs', region_name=DEFAULT_REGION_NAME
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_profile_name(self, mock_session):
        # profile_name specified
        FlowLogsReader('some_group', profile_name='my-profile')
        mock_session.Session.assert_called_with(profile_name='my-profile')

        # No profile specified
        FlowLogsReader('some_group')
        mock_session.Session.assert_called_with()

    def test_read_streams(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {'events': [0]},
            {'events': [1, 2]},
            {'events': [3, 4, 5]},
        ]
        self.mock_client.get_paginator.return_value = paginator

        actual = list(self.inst._read_streams())
        expected = [0, 1, 2, 3, 4, 5]
        self.assertEqual(actual, expected)

    def test_iteration(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[1]},
                ],
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': V2_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': V2_RECORDS[4]},
                ],
            },
        ]
        self.mock_client.get_paginator.return_value = paginator

        # Calling list on the instance causes it to iterate through all records
        actual = [next(self.inst)] + list(self.inst)
        expected = [
            FlowRecord.from_cwl_event({'message': x}) for x in V2_RECORDS
        ]
        self.assertEqual(actual, expected)

    def test_iteration_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': V2_RECORDS[0]}
            event_1 = {'logStreamName': 'log_0', 'message': V2_RECORDS[1]}
            for item in [{'events': [event_0, event_1]}]:
                yield item

            err_msg = '{}: {}'.format(DUPLICATE_NEXT_TOKEN_MESSAGE, 'token')
            raise PaginationError(message=err_msg)

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Don't fail if botocore's paginator raises a PaginationError
        actual = [next(self.inst)] + list(self.inst)
        records = V2_RECORDS[:2]
        expected = [FlowRecord.from_cwl_event({'message': x}) for x in records]
        self.assertEqual(actual, expected)

    def test_iteration_unexpected_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': V2_RECORDS[0]}
            yield {'events': [event_0]}
            raise PaginationError(message='other error')

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Fail for unexpected PaginationError
        self.assertRaises(PaginationError, lambda: list(self.inst))

    def test_threads(self):
        inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
            filter_pattern='REJECT',
            boto_client=self.mock_client,
            thread_count=1,
        )

        paginators = []

        def _get_paginator(operation):
            nonlocal paginators
            paginator = MagicMock()
            if operation == 'describe_log_streams':
                paginator.paginate.return_value = [
                    {
                        'logStreams': [
                            {
                                'logStreamName': 'too_late',
                                'firstEventTimestamp': inst.end_ms,
                                'lastEventTimestamp': inst.start_ms,
                            },
                            {
                                'logStreamName': 'too_late',
                                'firstEventTimestamp': inst.end_ms - 1,
                                'lastEventTimestamp': (
                                    inst.start_ms - LAST_EVENT_DELAY_MSEC - 1
                                ),
                            },
                        ],
                    },
                    {
                        'logStreams': [
                            {
                                'logStreamName': 'first_stream',
                                'firstEventTimestamp': inst.start_ms,
                                'lastEventTimestamp': inst.end_ms,
                            },
                            {
                                'logStreamName': 'second_stream',
                                'firstEventTimestamp': inst.start_ms,
                                'lastEventTimestamp': inst.end_ms,
                            },
                        ],
                    },
                ]
            elif operation == 'filter_log_events':
                paginator.paginate.return_value = [
                    {
                        'events': [
                            {'message': V2_RECORDS[0]},
                            {'message': V2_RECORDS[1]},
                        ],
                    },
                    {
                        'events': [
                            {'message': V2_RECORDS[2]},
                            {'message': V2_RECORDS[3]},
                        ],
                    },
                ]
            else:
                self.fail('invalid operation')

            paginators.append(paginator)
            return paginator

        self.mock_client.get_paginator.side_effect = _get_paginator

        events = list(inst)
        self.assertEqual(len(events), 8)

        paginators[0].paginate.assert_called_once_with(
            logGroupName='group_name',
            orderBy='LastEventTime',
            descending=True,
        )
        paginators[1].paginate.assert_called_once_with(
            logGroupName='group_name',
            startTime=inst.start_ms,
            endTime=inst.end_ms,
            interleaved=True,
            filterPattern='REJECT',
            logStreamNames=['first_stream'],
        )
        paginators[2].paginate.assert_called_once_with(
            logGroupName='group_name',
            startTime=inst.start_ms,
            endTime=inst.end_ms,
            interleaved=True,
            filterPattern='REJECT',
            logStreamNames=['second_stream'],
        )
class FlowLogsReaderTestCase(TestCase):
    @patch('flowlogs_reader.flowlogs_reader.boto3', autospec=True)
    def setUp(self, mock_boto3):
        self.mock_session = MagicMock()
        mock_boto3.session.Session.return_value = self.mock_session

        self.mock_client = MagicMock()
        self.mock_session.client.return_value = self.mock_client

        self.start_time = datetime(2015, 8, 12, 12, 0, 0)
        self.end_time = datetime(2015, 8, 12, 13, 0, 0)

        self.inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
        )

    def test_init(self):
        self.assertEqual(self.inst.log_group_name, 'group_name')
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.start_ms // 1000),
            self.start_time
        )
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.end_ms // 1000),
            self.end_time
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_region_name(self, mock_session):
        # Region specified
        FlowLogsReader('some_group', region_name='some-region')
        mock_session.Session.assert_called_with(region_name='some-region')

        # No region specified - assume configuration file worked
        FlowLogsReader('some_group')
        mock_session.Session.assert_called_with()

        # Region specified in boto_client_kwargs
        FlowLogsReader(
            'some_group', boto_client_kwargs={'region_name': 'my-region'}
        )
        mock_session.Session.assert_called_with(region_name='my-region')

        # No region specified and no configuration file
        def mock_response(*args, **kwargs):
            if 'region_name' not in kwargs:
                raise NoRegionError

        mock_session.Session().client.side_effect = mock_response
        FlowLogsReader('some_group')
        mock_session.Session().client.assert_called_with(
            'logs', region_name=DEFAULT_REGION_NAME
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_profile_name(self, mock_session):
        # profile_name specified
        FlowLogsReader('some_group', profile_name='my-profile')
        mock_session.Session.assert_called_with(profile_name='my-profile')

        # No profile specified
        FlowLogsReader('some_group')
        mock_session.Session.assert_called_with()

    def test_read_streams(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {'events': [0]},
            {'events': [1, 2]},
            {'events': [3, 4, 5]},
        ]
        self.mock_client.get_paginator.return_value = paginator

        actual = list(self.inst._read_streams())
        expected = [0, 1, 2, 3, 4, 5]
        self.assertEqual(actual, expected)

    def test_iteration(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[1]},
                ],
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': SAMPLE_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': SAMPLE_RECORDS[4]},
                ],
            },
        ]
        self.mock_client.get_paginator.return_value = paginator

        # Calling list on the instance causes it to iterate through all records
        actual = list(self.inst)
        expected = [FlowRecord.from_message(x) for x in SAMPLE_RECORDS]
        self.assertEqual(actual, expected)
class FlowLogsReaderTestCase(TestCase):
    def setUp(self):
        self.mock_client = MagicMock()

        self.start_time = datetime(2015, 8, 12, 12, 0, 0)
        self.end_time = datetime(2015, 8, 12, 13, 0, 0)

        self.inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
            filter_pattern='REJECT',
            boto_client=self.mock_client,
        )

    def test_init(self):
        self.assertEqual(self.inst.log_group_name, 'group_name')
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.start_ms // 1000),
            self.start_time
        )
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.end_ms // 1000),
            self.end_time
        )
        self.assertEqual(
            self.inst.paginator_kwargs['filterPattern'], 'REJECT'
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_region_name(self, mock_session):
        # Region specified for session
        FlowLogsReader('some_group', region_name='some-region')
        mock_session.Session.assert_called_with(region_name='some-region')

        # Region specified for client, not for session
        FlowLogsReader(
            'some_group', boto_client_kwargs={'region_name': 'my-region'}
        )
        mock_session.Session().client.assert_called_with(
            'logs', region_name='my-region'
        )

        # No region specified for session or client - use the default
        def mock_response(*args, **kwargs):
            if 'region_name' not in kwargs:
                raise NoRegionError

        mock_session.Session().client.side_effect = mock_response
        FlowLogsReader('some_group')
        mock_session.Session().client.assert_called_with(
            'logs', region_name=DEFAULT_REGION_NAME
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.session', autospec=True)
    def test_profile_name(self, mock_session):
        # profile_name specified
        FlowLogsReader('some_group', profile_name='my-profile')
        mock_session.Session.assert_called_with(profile_name='my-profile')

        # No profile specified
        FlowLogsReader('some_group')
        mock_session.Session.assert_called_with()

    def test_read_streams(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {'events': [0]},
            {'events': [1, 2]},
            {'events': [3, 4, 5]},
        ]
        self.mock_client.get_paginator.return_value = paginator

        actual = list(self.inst._read_streams())
        expected = [0, 1, 2, 3, 4, 5]
        self.assertEqual(actual, expected)

    def test_iteration(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[1]},
                ],
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': SAMPLE_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': SAMPLE_RECORDS[4]},
                ],
            },
        ]
        self.mock_client.get_paginator.return_value = paginator

        # Calling list on the instance causes it to iterate through all records
        actual = [next(self.inst)] + list(self.inst)
        expected = [FlowRecord.from_message(x) for x in SAMPLE_RECORDS]
        self.assertEqual(actual, expected)

    def test_iteration_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[0]}
            event_1 = {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[1]}
            for item in [{'events': [event_0, event_1]}]:
                yield item

            err_msg = '{}: {}'.format(DUPLICATE_NEXT_TOKEN_MESSAGE, 'token')
            raise PaginationError(message=err_msg)

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Don't fail if botocore's paginator raises a PaginationError
        actual = [next(self.inst)] + list(self.inst)
        expected = [FlowRecord.from_message(x) for x in SAMPLE_RECORDS[:2]]
        self.assertEqual(actual, expected)

    def test_iteration_unexpected_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[0]}
            yield {'events': [event_0]}
            raise PaginationError(message='other error')

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Fail for unexpected PaginationError
        self.assertRaises(PaginationError, lambda: list(self.inst))
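The two error tests above pin down an error-handling contract: a PaginationError whose message carries DUPLICATE_NEXT_TOKEN_MESSAGE appears to be swallowed so iteration simply ends with the records read so far, while any other PaginationError propagates. A hedged sketch of a consumer written against that contract; the helper below is illustrative, not library code.

from botocore.exceptions import PaginationError


def drain(pages, duplicate_token_message):
    # Collect events until the paginator is exhausted or reports the
    # known duplicate-token condition; re-raise anything else.
    events = []
    try:
        for page in pages:
            events.extend(page['events'])
    except PaginationError as exc:
        if duplicate_token_message not in str(exc):
            raise
    return events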
def get_connections(account_id, region, connections=None):
    # Relies on module-level state defined elsewhere in this module:
    # NAMES, LAST_TIMES, AWS_IPS, AWS_S3_DOMAIN_PATTERN, get_name, get_lb_dns_names.
    logging.info('Getting connections for {} {}..'.format(account_id, region))
    sts = boto3.client('sts')
    response = sts.assume_role(
        RoleArn='arn:aws:iam::' + account_id + ':role/fullstop',
        RoleSessionName='fullstop',
        DurationSeconds=900)
    session = boto3.Session(
        aws_access_key_id=response['Credentials']['AccessKeyId'],
        aws_secret_access_key=response['Credentials']['SecretAccessKey'],
        aws_session_token=response['Credentials']['SessionToken'],
        region_name=region)
    ec2_client = session.client('ec2')
    rds = session.client('rds')

    instance_ids = []
    interfaces = {}
    logging.info('%s: Collecting network interfaces..', account_id)
    res = ec2_client.describe_network_interfaces()
    lb_names = []
    for iface in res['NetworkInterfaces']:
        if 'Association' in iface:
            # public IP involved
            interfaces[iface['NetworkInterfaceId']] = iface
            descr = iface.get('Description', '')
            if descr.startswith('ELB'):
                words = descr.split()
                lb_names.append(words[-1])
            if 'Attachment' in iface and 'InstanceId' in iface['Attachment']:
                instance_ids.append(iface['Attachment']['InstanceId'])

    lb_dns_names = get_lb_dns_names(session, lb_names)

    res = rds.describe_db_instances()
    for db in res['DBInstances']:
        if db['PubliclyAccessible']:
            host, port = (db['Endpoint']['Address'], db['Endpoint']['Port'])
            try:
                ai = socket.getaddrinfo(host, port, family=socket.AF_INET,
                                        type=socket.SOCK_STREAM)
            except socket.gaierror:
                # DNS resolution failed; skip this endpoint
                ai = []
            for _, _, _, _, ip_port in ai:
                ip, _ = ip_port
                NAMES[ip] = '/'.join((account_id, region, host))

    local_names = {}
    instance_count = 0
    logging.info('%s: Collecting public EC2 instances..', account_id)
    res = ec2_client.describe_instances(InstanceIds=instance_ids)
    for reservation in res['Reservations']:
        for inst in reservation['Instances']:
            instance_count += 1
            if 'PrivateIpAddress' in inst and 'Tags' in inst:
                name = ''.join(
                    [x['Value'] for x in inst['Tags'] if x['Key'] == 'Name'])
                local_names[inst['PrivateIpAddress']] = '/'.join(
                    (name, inst.get('PublicIpAddress', '')))

    logging.info(
        '%s: Got %d interfaces, %d load balancers and %d instances',
        account_id, len(interfaces), len(lb_dns_names), instance_count)

    connections = collections.Counter() if connections is None else connections
    now = datetime.datetime.utcnow()
    start_time = LAST_TIMES.get(account_id, now - datetime.timedelta(minutes=10))
    reader = FlowLogsReader('vpc-flowgroup', region_name=region,
                            start_time=start_time, end_time=now)
    reader.logs_client = session.client('logs')
    record_count = 0
    new_connections = 0
    for record in reader:
        # just consider accepted packets
        if record.action == 'ACCEPT':
            record_count += 1
            src = ipaddress.ip_address(record.srcaddr)
            # only look at packets received at public interfaces
            if record.interface_id in interfaces and not src.is_private:
                name = get_name(record.srcaddr)
                dest = interfaces.get(record.interface_id, {}).get('Description')
                if not dest or dest.startswith('Primary'):
                    # EC2 instance
                    if record.srcaddr in AWS_IPS and AWS_S3_DOMAIN_PATTERN.match(
                            name.split('/')[0]):
                        # ignore S3 public IPs
                        # (most probably packets from S3 to public EC2)
                        continue
                    dest = local_names.get(record.dstaddr, record.dstaddr)
                elif dest.startswith('ELB'):
                    # ELB
                    words = dest.split()
                    dest = lb_dns_names.get(words[-1], dest)
                elif dest.startswith('RDS'):
                    # RDS instance
                    public_ip = interfaces.get(record.interface_id, {}).get(
                        'Association', {}).get('PublicIp', '')
                    dest = NAMES.get(public_ip, 'RDS/' + public_ip)
                elif dest:
                    dest += '/' + interfaces.get(record.interface_id, {}).get(
                        'Association', {}).get('PublicIp', '')
                if 'NAT' not in dest and 'Odd' not in dest:
                    conn = (name, dest, record.dstport)
                    if conn not in connections:
                        new_connections += 1
                    connections[conn] += 1
    logging.info('%s: Got %d records and %d new connections',
                 account_id, record_count, new_connections)
    LAST_TIMES[account_id] = now
    return connections
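A hedged sketch of how get_connections might be driven for several accounts and regions; the accounts mapping and region list below are placeholders, not values taken from this code.

# Placeholder inputs; the real account list and regions live elsewhere.
accounts = {'123456789012': ['eu-west-1', 'eu-central-1']}

connections = None
for account_id, regions in accounts.items():
    for region in regions:
        connections = get_connections(account_id, region, connections)

for (name, dest, port), count in sorted(connections.items()):
    print('{} -> {}:{} ({} flow records)'.format(name, dest, port, count))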
class FlowLogsReaderTestCase(TestCase):
    def setUp(self):
        self.mock_client = MagicMock()

        self.start_time = datetime(2015, 8, 12, 12, 0, 0)
        self.end_time = datetime(2015, 8, 12, 13, 0, 0)

        self.inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
            filter_pattern='REJECT',
            boto_client=self.mock_client,
        )

    def test_init(self):
        self.assertEqual(self.inst.log_group_name, 'group_name')
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.start_ms // 1000),
            self.start_time,
        )
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.end_ms // 1000), self.end_time
        )
        self.assertEqual(self.inst.paginator_kwargs['filterPattern'], 'REJECT')

    @patch('flowlogs_reader.flowlogs_reader.boto3.client', autospec=True)
    def test_region_name(self, mock_client):
        # Region specified for session
        FlowLogsReader('some_group', region_name='some-region')
        mock_client.assert_called_with('logs', region_name='some-region')

        # None specified
        FlowLogsReader('some_group')
        mock_client.assert_called_with('logs')

    @patch('flowlogs_reader.flowlogs_reader.boto3.client', autospec=True)
    def test_get_fields(self, mock_client):
        cwl_client = MagicMock()
        ec2_client = mock_client.return_value
        ec2_client.describe_flow_logs.return_value = {
            'FlowLogs': [
                {'LogFormat': '${srcaddr} ${dstaddr} ${start} ${log-status}'}
            ]
        }
        reader = FlowLogsReader(
            'some_group',
            boto_client=cwl_client,
            fields=None,
        )
        self.assertEqual(
            reader.fields, ('srcaddr', 'dstaddr', 'start', 'log_status')
        )
        ec2_client.describe_flow_logs.assert_called_once_with(
            Filters=[{'Name': 'log-group-name', 'Values': ['some_group']}]
        )

    def test_read_streams(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[1]},
                ],
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': V2_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': V2_RECORDS[4]},
                ],
            },
        ]
        self.mock_client.get_paginator.return_value = paginator

        actual = list(self.inst._read_streams())
        expected = []
        for page in paginator.paginate.return_value:
            expected += page['events']
        self.assertEqual(actual, expected)

    def test_iteration(self):
        paginator = MagicMock()
        paginator.paginate.return_value = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[1]},
                ],
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': V2_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': V2_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': V2_RECORDS[4]},
                ],
            },
        ]
        self.mock_client.get_paginator.return_value = paginator

        # Calling list on the instance causes it to iterate through all records
        actual = [next(self.inst)] + list(self.inst)
        expected = [
            FlowRecord.from_cwl_event({'message': x}) for x in V2_RECORDS
        ]
        self.assertEqual(actual, expected)

        all_pages = paginator.paginate.return_value
        expected_bytes = sum(
            len(e['message']) for p in all_pages for e in p['events']
        )
        self.assertEqual(self.inst.bytes_processed, expected_bytes)

    def test_iteration_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': V2_RECORDS[0]}
            event_1 = {'logStreamName': 'log_0', 'message': V2_RECORDS[1]}
            for item in [{'events': [event_0, event_1]}]:
                yield item

            err_msg = '{}: {}'.format(DUPLICATE_NEXT_TOKEN_MESSAGE, 'token')
            raise PaginationError(message=err_msg)

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Don't fail if botocore's paginator raises a PaginationError
        actual = [next(self.inst)] + list(self.inst)
        records = V2_RECORDS[:2]
        expected = [FlowRecord.from_cwl_event({'message': x}) for x in records]
        self.assertEqual(actual, expected)

    def test_iteration_unexpected_error(self):
        # Simulate the paginator failing
        def _get_paginator(*args, **kwargs):
            event_0 = {'logStreamName': 'log_0', 'message': V2_RECORDS[0]}
            yield {'events': [event_0]}
            raise PaginationError(message='other error')

        self.mock_client.get_paginator.return_value.paginate.side_effect = (
            _get_paginator
        )

        # Fail for unexpected PaginationError
        self.assertRaises(PaginationError, lambda: list(self.inst))

    def test_threads(self):
        inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
            filter_pattern='REJECT',
            boto_client=self.mock_client,
            thread_count=1,
        )

        paginators = []

        def _get_paginator(operation):
            nonlocal paginators
            paginator = MagicMock()
            if operation == 'describe_log_streams':
                paginator.paginate.return_value = [
                    {
                        'logStreams': [
                            {
                                'logStreamName': 'too_late',
                                'firstEventTimestamp': inst.end_ms,
                                'lastEventTimestamp': inst.start_ms,
                            },
                            {
                                'logStreamName': 'too_late',
                                'firstEventTimestamp': inst.end_ms - 1,
                                'lastEventTimestamp': (
                                    inst.start_ms - LAST_EVENT_DELAY_MSEC - 1
                                ),
                            },
                        ],
                    },
                    {
                        'logStreams': [
                            {
                                'logStreamName': 'first_stream',
                                'firstEventTimestamp': inst.start_ms,
                                'lastEventTimestamp': inst.end_ms,
                            },
                            {
                                'logStreamName': 'second_stream',
                                'firstEventTimestamp': inst.start_ms,
                                'lastEventTimestamp': inst.end_ms,
                            },
                        ],
                    },
                ]
            elif operation == 'filter_log_events':
                paginator.paginate.return_value = [
                    {
                        'events': [
                            {'message': V2_RECORDS[0]},
                            {'message': V2_RECORDS[1]},
                        ],
                    },
                    {
                        'events': [
                            {'message': V2_RECORDS[2]},
                            {'message': V2_RECORDS[3]},
                        ],
                    },
                ]
            else:
                self.fail('invalid operation')

            paginators.append(paginator)
            return paginator

        self.mock_client.get_paginator.side_effect = _get_paginator

        events = list(inst)
        self.assertEqual(len(events), 8)

        paginators[0].paginate.assert_called_once_with(
            logGroupName='group_name',
            orderBy='LastEventTime',
            descending=True,
        )
        paginators[1].paginate.assert_called_once_with(
            logGroupName='group_name',
            startTime=inst.start_ms,
            endTime=inst.end_ms,
            interleaved=True,
            filterPattern='REJECT',
            logStreamNames=['first_stream'],
        )
        paginators[2].paginate.assert_called_once_with(
            logGroupName='group_name',
            startTime=inst.start_ms,
            endTime=inst.end_ms,
            interleaved=True,
            filterPattern='REJECT',
            logStreamNames=['second_stream'],
        )
class FlowLogsReaderTestCase(TestCase):
    @patch('flowlogs_reader.flowlogs_reader.boto3', autospec=True)
    def setUp(self, mock_boto3):
        self.mock_client = MagicMock()
        mock_boto3.client.return_value = self.mock_client

        self.start_time = datetime(2015, 8, 12, 12, 0, 0)
        self.end_time = datetime(2015, 8, 12, 13, 0, 0)

        self.inst = FlowLogsReader(
            'group_name',
            start_time=self.start_time,
            end_time=self.end_time,
        )

    def test_init(self):
        self.assertEqual(self.inst.log_group_name, 'group_name')
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.start_ms // 1000),
            self.start_time
        )
        self.assertEqual(
            datetime.utcfromtimestamp(self.inst.end_ms // 1000),
            self.end_time
        )

    @patch('flowlogs_reader.flowlogs_reader.boto3.client', autospec=True)
    def test_region(self, mock_client):
        # Region specified
        FlowLogsReader('some_group', region_name='some-region')
        mock_client.assert_called_with('logs', region_name='some-region')

        # No region specified - assume configuration file worked
        FlowLogsReader('some_group')
        mock_client.assert_called_with('logs')

        # No region specified and no configuration file
        def mock_response(*args, **kwargs):
            if 'region_name' not in kwargs:
                raise NoRegionError

        mock_client.side_effect = mock_response
        FlowLogsReader('some_group')
        mock_client.assert_called_with('logs', region_name=DEFAULT_REGION_NAME)

    def test_read_streams(self):
        response_list = [
            {'events': [0], 'nextToken': 'token_0'},
            {'events': [1, 2], 'nextToken': 'token_1'},
            {'events': [3, 4, 5], 'nextToken': None},
            {'events': [6], 'nextForwardToken': 'token_2'},  # Unreachable
        ]

        def mock_filter(*args, **kwargs):
            return response_list.pop(0)

        self.mock_client.filter_log_events.side_effect = mock_filter

        actual = list(self.inst._read_streams())
        expected = [0, 1, 2, 3, 4, 5]
        self.assertEqual(actual, expected)

    def test_iteration(self):
        response_list = [
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[0]},
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[1]},
                ],
                'nextToken': 'token_0',
            },
            {
                'events': [
                    {'logStreamName': 'log_0', 'message': SAMPLE_RECORDS[2]},
                    {'logStreamName': 'log_1', 'message': SAMPLE_RECORDS[3]},
                    {'logStreamName': 'log_2', 'message': SAMPLE_RECORDS[4]},
                ],
            },
        ]

        def mock_filter(*args, **kwargs):
            return response_list.pop(0)

        self.mock_client.filter_log_events.side_effect = mock_filter

        # Calling list on the instance causes it to iterate through all records
        actual = list(self.inst)
        expected = [FlowRecord.from_message(x) for x in SAMPLE_RECORDS]
        self.assertEqual(actual, expected)
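For reference, the SAMPLE_RECORDS and V2_RECORDS fixtures used throughout these tests are defined elsewhere in the test module; they are strings in the default version 2 VPC Flow Logs format (version, account-id, interface-id, srcaddr, dstaddr, srcport, dstport, protocol, packets, bytes, start, end, action, log-status). A hedged sketch of the parsing step these tests exercise, using a made-up record rather than one of the real fixtures:

# Made-up record in the default v2 flow log format.
message = (
    '2 123456789010 eni-102010ab 198.51.100.1 192.0.2.1 '
    '443 49152 6 10 840 1439387263 1439387264 ACCEPT OK'
)
record = FlowRecord.from_message(message)
assert record.action == 'ACCEPT'
assert record.srcaddr == '198.51.100.1'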