def test_region_name(self, mock_client):
    """A region_name passed to the reader must reach the boto client call."""
    # Explicit region: forwarded as a keyword to the client factory.
    FlowLogsReader('some_group', region_name='some-region')
    mock_client.assert_called_with('logs', region_name='some-region')

    # No region given: the client is created without a region keyword.
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs')
def test_profile_name(self, mock_session):
    """A profile_name passed to the reader must reach the boto Session."""
    # Explicit profile: forwarded to Session as a keyword.
    FlowLogsReader('some_group', profile_name='my-profile')
    mock_session.Session.assert_called_with(profile_name='my-profile')

    # No profile given: Session is created with no arguments.
    FlowLogsReader('some_group')
    mock_session.Session.assert_called_with()
def test_region(self, mock_client):
    """Region resolution: explicit, from configuration, or the default."""
    # Region specified explicitly.
    FlowLogsReader('some_group', region_name='some-region')
    mock_client.assert_called_with('logs', region_name='some-region')

    # No region specified - assume configuration file worked.
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs')

    # No region specified and no configuration file: the reader must
    # retry with DEFAULT_REGION_NAME after NoRegionError.
    def _raise_unless_region(*args, **kwargs):
        if 'region_name' not in kwargs:
            raise NoRegionError

    mock_client.side_effect = _raise_unless_region
    FlowLogsReader('some_group')
    mock_client.assert_called_with('logs', region_name=DEFAULT_REGION_NAME)
def setUp(self, mock_boto3):
    """Build a FlowLogsReader backed by a mocked boto3 client."""
    self.mock_client = MagicMock()
    mock_boto3.client.return_value = self.mock_client

    # Fixed one-hour window used by the tests below.
    self.start_time = datetime(2015, 8, 12, 12, 0, 0)
    self.end_time = datetime(2015, 8, 12, 13, 0, 0)

    self.inst = FlowLogsReader(
        'group_name',
        start_time=self.start_time,
        end_time=self.end_time,
    )
def test_region_name(self, mock_session):
    """Region may come from the session, the client kwargs, or the default."""
    # Region specified for the session.
    FlowLogsReader('some_group', region_name='some-region')
    mock_session.Session.assert_called_with(region_name='some-region')

    # Region specified for the client only, not the session.
    FlowLogsReader('some_group', boto_client_kwargs={'region_name': 'my-region'})
    mock_session.Session().client.assert_called_with(
        'logs', region_name='my-region')

    # No region anywhere: NoRegionError triggers the default-region retry.
    def _raise_unless_region(*args, **kwargs):
        if 'region_name' not in kwargs:
            raise NoRegionError

    mock_session.Session().client.side_effect = _raise_unless_region
    FlowLogsReader('some_group')
    mock_session.Session().client.assert_called_with(
        'logs', region_name=DEFAULT_REGION_NAME)
def setUp(self):
    """Build a FlowLogsReader with an injected mock client and a filter."""
    self.mock_client = MagicMock()

    # Fixed one-hour window used by the tests below.
    self.start_time = datetime(2015, 8, 12, 12, 0, 0)
    self.end_time = datetime(2015, 8, 12, 13, 0, 0)

    self.inst = FlowLogsReader(
        'group_name',
        start_time=self.start_time,
        end_time=self.end_time,
        filter_pattern='REJECT',
        boto_client=self.mock_client,
    )
def test_get_fields(self, mock_client):
    """With fields=None the reader asks EC2 for the flow log's LogFormat."""
    logs_client = MagicMock()
    mock_ec2 = mock_client.return_value
    mock_ec2.describe_flow_logs.return_value = {
        'FlowLogs': [
            {'LogFormat': '${srcaddr} ${dstaddr} ${start} ${log-status}'}
        ]
    }

    reader = FlowLogsReader(
        'some_group',
        boto_client=logs_client,
        fields=None,
    )

    # Hyphens in the log format become underscores in the field names.
    self.assertEqual(
        reader.fields,
        ('srcaddr', 'dstaddr', 'start', 'log_status')
    )

    # The lookup must be filtered to the reader's log group.
    mock_ec2.describe_flow_logs.assert_called_once_with(
        Filters=[{'Name': 'log-group-name', 'Values': ['some_group']}]
    )
def get_connections(account_id, region, connections=None):
    """Count accepted inbound connections seen in an account's VPC flow logs.

    Assumes the ``fullstop`` role in the target account, inventories public
    network interfaces, load balancers, EC2 instances and public RDS
    endpoints, then reads flow log records since the last run and tallies
    accepted packets arriving at public interfaces from non-private sources.

    :param account_id: AWS account id (string) whose role is assumed
    :param region: AWS region name to inspect
    :param connections: optional Counter to accumulate into; a fresh
        Counter is created when ``None``
    :return: Counter mapping ``(source_name, dest_name, dest_port)`` tuples
        to packet counts
    """
    logging.info('Getting connections for {} {}..'.format(account_id, region))

    # Short-lived (15 min) credentials for the audit role in the account.
    sts = boto3.client('sts')
    response = sts.assume_role(
        RoleArn='arn:aws:iam::' + account_id + ':role/fullstop',
        RoleSessionName='fullstop',
        DurationSeconds=900)
    session = boto3.Session(
        aws_access_key_id=response['Credentials']['AccessKeyId'],
        aws_secret_access_key=response['Credentials']['SecretAccessKey'],
        aws_session_token=response['Credentials']['SessionToken'],
        region_name=region)
    ec2_client = session.client('ec2')
    rds = session.client('rds')

    instance_ids = []
    interfaces = {}
    logging.info('%s: Collecting network interfaces..', account_id)
    res = ec2_client.describe_network_interfaces()
    lb_names = []
    for iface in res['NetworkInterfaces']:
        if 'Association' in iface:
            # public IP involved
            interfaces[iface['NetworkInterfaceId']] = iface
            # BUGFIX: default to '' — a missing Description previously
            # returned None and crashed on .startswith below.
            descr = iface.get('Description', '')
            if descr.startswith('ELB'):
                words = descr.split()
                lb_names.append(words[-1])
            if 'Attachment' in iface and 'InstanceId' in iface['Attachment']:
                instance_ids.append(iface['Attachment']['InstanceId'])

    lb_dns_names = get_lb_dns_names(session, lb_names)

    # Resolve public RDS endpoints so their IPs map to readable names.
    res = rds.describe_db_instances()
    for db in res['DBInstances']:
        if db['PubliclyAccessible']:
            host, port = (db['Endpoint']['Address'], db['Endpoint']['Port'])
            try:
                # BUGFIX: the keyword argument is "type", not "socktype";
                # the old call always raised TypeError, which the former
                # bare "except:" silently swallowed, so RDS IPs were never
                # recorded.  Also narrowed the handler to OSError (covers
                # socket.gaierror for failed DNS lookups).
                ai = socket.getaddrinfo(host, port, family=socket.AF_INET,
                                        type=socket.SOCK_STREAM)
            except OSError:
                ai = []
            for _, _, _, _, ip_port in ai:
                ip, _ = ip_port
                NAMES[ip] = '/'.join((account_id, region, host))

    local_names = {}
    instance_count = 0
    logging.info('%s: Collecting public EC2 instances..', account_id)
    res = ec2_client.describe_instances(InstanceIds=instance_ids)
    for reservation in res['Reservations']:
        for inst in reservation['Instances']:
            instance_count += 1
            if 'PrivateIpAddress' in inst and 'Tags' in inst:
                name = ''.join(
                    [x['Value'] for x in inst['Tags'] if x['Key'] == 'Name'])
                local_names[inst['PrivateIpAddress']] = '/'.join(
                    (name, inst.get('PublicIpAddress', '')))

    # Use logging's lazy %-substitution instead of mixing %s with
    # str.format (same rendered message as before).
    logging.info('%s: Got %s interfaces, %s load balancers and %s instances',
                 account_id, len(interfaces), len(lb_dns_names),
                 instance_count)

    connections = collections.Counter() if connections is None else connections
    now = datetime.datetime.utcnow()
    # Resume from the previous run; default to a 10-minute lookback.
    start_time = LAST_TIMES.get(account_id,
                                now - datetime.timedelta(minutes=10))
    reader = FlowLogsReader('vpc-flowgroup', region_name=region,
                            start_time=start_time, end_time=now)
    reader.logs_client = session.client('logs')
    record_count = 0
    new_connections = 0
    for record in reader:
        # just consider accepted packets
        if record.action != 'ACCEPT':
            continue
        record_count += 1
        src = ipaddress.ip_address(record.srcaddr)
        # only look at packets received at public interfaces
        if record.interface_id not in interfaces or src.is_private:
            continue
        name = get_name(record.srcaddr)
        dest = interfaces.get(record.interface_id, {}).get('Description')
        if not dest or dest.startswith('Primary'):
            # EC2 instance
            if record.srcaddr in AWS_IPS and AWS_S3_DOMAIN_PATTERN.match(
                    name.split('/')[0]):
                # ignore S3 public IPs
                # (most probably packets from S3 to public EC2)
                continue
            dest = local_names.get(record.dstaddr, record.dstaddr)
        elif dest.startswith('ELB'):
            # ELB: translate the interface description back to a DNS name
            words = dest.split()
            dest = lb_dns_names.get(words[-1], dest)
        elif dest.startswith('RDS'):
            # RDS instance: look up the name recorded during resolution
            public_ip = interfaces.get(record.interface_id, {}).get(
                'Association', {}).get('PublicIp', '')
            dest = NAMES.get(public_ip, 'RDS/' + public_ip)
        elif dest:
            dest += '/' + interfaces.get(record.interface_id, {}).get(
                'Association', {}).get('PublicIp', '')
        # NOTE(review): 'NAT'/'Odd' appear to filter out uninteresting
        # interfaces by naming convention — confirm against tagging policy.
        if 'NAT' not in dest and 'Odd' not in dest:
            conn = (name, dest, record.dstport)
            if conn not in connections:
                new_connections += 1
            connections[conn] += 1

    logging.info('%s: Got %s records and %s new connections',
                 account_id, record_count, new_connections)
    LAST_TIMES[account_id] = now
    return connections
def test_threads(self):
    """With thread_count set, each in-range stream is fetched separately."""
    inst = FlowLogsReader(
        'group_name',
        start_time=self.start_time,
        end_time=self.end_time,
        filter_pattern='REJECT',
        boto_client=self.mock_client,
        thread_count=1,
    )

    created = []

    def _fake_get_paginator(operation):
        mock_paginator = MagicMock()
        if operation == 'describe_log_streams':
            # First page: two streams entirely outside the query window.
            # Second page: two streams that overlap it.
            mock_paginator.paginate.return_value = [
                {
                    'logStreams': [
                        {
                            'logStreamName': 'too_late',
                            'firstEventTimestamp': inst.end_ms,
                            'lastEventTimestamp': inst.start_ms,
                        },
                        {
                            'logStreamName': 'too_late',
                            'firstEventTimestamp': inst.end_ms - 1,
                            'lastEventTimestamp': (
                                inst.start_ms - LAST_EVENT_DELAY_MSEC - 1
                            ),
                        },
                    ],
                },
                {
                    'logStreams': [
                        {
                            'logStreamName': 'first_stream',
                            'firstEventTimestamp': inst.start_ms,
                            'lastEventTimestamp': inst.end_ms,
                        },
                        {
                            'logStreamName': 'second_stream',
                            'firstEventTimestamp': inst.start_ms,
                            'lastEventTimestamp': inst.end_ms,
                        },
                    ],
                },
            ]
        elif operation == 'filter_log_events':
            mock_paginator.paginate.return_value = [
                {
                    'events': [
                        {'message': V2_RECORDS[0]},
                        {'message': V2_RECORDS[1]},
                    ],
                },
                {
                    'events': [
                        {'message': V2_RECORDS[2]},
                        {'message': V2_RECORDS[3]},
                    ],
                },
            ]
        else:
            self.fail('invalid operation')
        created.append(mock_paginator)
        return mock_paginator

    self.mock_client.get_paginator.side_effect = _fake_get_paginator

    events = list(inst)
    # Two in-range streams, four records each.
    self.assertEqual(len(events), 8)

    # Paginator 0 listed the streams; 1 and 2 fetched events per stream.
    created[0].paginate.assert_called_once_with(
        logGroupName='group_name',
        orderBy='LastEventTime',
        descending=True,
    )
    created[1].paginate.assert_called_once_with(
        logGroupName='group_name',
        startTime=inst.start_ms,
        endTime=inst.end_ms,
        interleaved=True,
        filterPattern='REJECT',
        logStreamNames=['first_stream'],
    )
    created[2].paginate.assert_called_once_with(
        logGroupName='group_name',
        startTime=inst.start_ms,
        endTime=inst.end_ms,
        interleaved=True,
        filterPattern='REJECT',
        logStreamNames=['second_stream'],
    )