def test_setup_crawler():
    """Verify setup_crawler() defaults, explicit arguments, and error cases."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    assert isinstance(crawler, crawlers.Crawler)
    assert len(crawler.org.accounts) == 3
    assert len(crawler.org.org_units) == 6
    for account in crawler.accounts:
        # every crawled account must carry a full STS credential set
        assert 'aws_access_key_id' in account.credentials
        assert 'aws_secret_access_key' in account.credentials
        assert 'aws_session_token' in account.credentials
    crawler = setup_crawler(
        ORG_ACCESS_ROLE,
        'account_role',
        ['account02', 'account03'],
        ['us-west-2', 'us-east-1'],
    )
    assert crawler.access_role == 'account_role'
    assert len(crawler.accounts) == 2
    assert len(crawler.regions) == 2
    # set literals/comprehensions instead of set([...]) wrappers
    assert {a.name for a in crawler.accounts} == {'account02', 'account03'}
    assert set(crawler.regions) == {'us-west-2', 'us-east-1'}
    with pytest.raises(TypeError):
        crawler = setup_crawler()
    with pytest.raises(ValueError):
        crawler = setup_crawler(ORG_ACCESS_ROLE, accounts='bogus_01')
    with pytest.raises(ValueError):
        crawler = setup_crawler(ORG_ACCESS_ROLE, regions='bogus_01')
def test_setup_crawler():
    """Verify setup_crawler() defaults, explicit arguments, and error cases.

    NOTE(review): another test_setup_crawler definition exists in this
    source; if both land in one module the later definition shadows the
    earlier one — confirm they belong to separate files.
    """
    Org('no_id', 'no_role').clear_cache()
    MockOrganization().simple()
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    assert isinstance(crawler, crawlers.Crawler)
    assert len(crawler.org.accounts) == 3
    assert len(crawler.org.org_units) == 6
    for account in crawler.accounts:
        # every crawled account must carry a full STS credential set
        assert 'aws_access_key_id' in account.credentials
        assert 'aws_secret_access_key' in account.credentials
        assert 'aws_session_token' in account.credentials
    crawler = setup_crawler(
        ORG_ACCESS_ROLE,
        'account_role',
        ['account02', 'account03'],
        ['us-west-2', 'us-east-1'],
    )
    assert crawler.access_role == 'account_role'
    assert len(crawler.accounts) == 2
    assert len(crawler.regions) == 2
    # set literals/comprehensions instead of set([...]) wrappers
    assert {a.name for a in crawler.accounts} == {'account02', 'account03'}
    assert set(crawler.regions) == {'us-west-2', 'us-east-1'}
    with pytest.raises(TypeError):
        crawler = setup_crawler()
    with pytest.raises(ValueError):
        crawler = setup_crawler(ORG_ACCESS_ROLE, accounts='bogus_01')
    with pytest.raises(ValueError):
        crawler = setup_crawler(ORG_ACCESS_ROLE, regions='bogus_01')
def test_make_config_service_role():
    """Check make_config_service_role() builds the AWS Config service role."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    client = boto3.client('iam', region_name=region, **account.credentials)
    # Stand up a mock managed policy for the service role to attach.
    mock_policy = {
        'Version': '2012-10-17',
        'Statement': [
            {
                'Effect': 'Allow',
                'Action': 'config:*',
                'Resource': "*",
            }
        ],
    }
    response = client.create_policy(
        PolicyName='AWSConfigServiceRolePolicy',
        Path='/aws-service-role/',
        PolicyDocument=json.dumps(mock_policy),
        Description='mock policy for testing test_setup_config_client()',
    )
    policy_arn = response['Policy']['Arn']
    role = payloads.make_config_service_role(region, account, policy_arn)
    # NOTE(review): isinstance(role, object) is vacuously true for any value.
    assert isinstance(role, object)
    assert role.name == 'AWSServiceRoleForConfig'
    assert role.path == '/aws-service-role/config.amazonaws.com/'
    assert list(role.attached_policies.all())[0].arn == policy_arn
    trust_statement = role.assume_role_policy_document['Statement'][0]
    assert trust_statement['Action'][0] == 'sts:AssumeRole'
    assert trust_statement['Principal']['Service'][0] == 'config.amazonaws.com'
def main(master_role, config_file):
    """
    Usage:

        cidrrunner -r MyIamRole -f ~/.config/cidr-runner.yaml
    """
    config = util.load_config(config_file)
    crawler = setup_crawler(
        master_role,
        accounts=config['accounts'],
        regions=config['regions'],
    )
    s3_bucket = util.setup_s3_bucket(config, crawler)
    base_obj_path = util.set_base_object_path()
    for payload_name in config['payloads']:
        obj_path = base_obj_path + '/' + payload_name + '.json'
        # Resolve the payload function by name.  getattr() replaces the
        # previous eval('payload.' + payload_name): eval on config-supplied
        # strings allows arbitrary code execution.
        f = getattr(payload, payload_name)
        execution = crawler.execute(f)
        # Collect one JSON document per account response, newline separated.
        text_stream = io.StringIO()
        for response in execution.responses:
            text_stream.write(jsonfmt(response.dump()) + '\n')
        s3_bucket.put_object(Key=obj_path, Body=text_stream.getvalue())
def test_create_list_buckets():
    """Exercise the create_bucket and list_buckets payloads."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    create_response = payloads.create_bucket(region, account, 'test_bucket')
    assert create_response['ResponseMetadata']['HTTPStatusCode'] == 200
    list_response = payloads.list_buckets(region, account)
    # bucket names are suffixed with the account id
    assert list_response['Buckets'][0] == 'test_bucket-' + account.id
def test_get_set_account_aliases():
    """Check set_account_alias()/get_account_aliases() round-trips."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    # default: alias falls back to the account name
    response = payloads.set_account_alias(region, account)
    response = payloads.get_account_aliases(region, account)
    assert response['Aliases'] == account.name
    # explicit alias overrides the default
    response = payloads.set_account_alias(region, account, alias='test_alias')
    response = payloads.get_account_aliases(region, account)
    # Previously this second response was fetched but never checked — dead
    # assignment.  Assert the explicit alias took effect, mirroring the
    # format of the first assertion above.
    assert response['Aliases'] == 'test_alias'
def test_list_hosted_zones():
    """Check the list_hosted_zones payload against a mock route53 zone."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    client = boto3.client('route53', region_name=region, **account.credentials)
    client.create_hosted_zone(
        Name='test_zone.example.com',
        CallerReference='a_unique_string',
    )
    response = payloads.list_hosted_zones(region, account)
    # route53 reports zone names with a trailing dot
    assert response['HostedZones'][0]['Name'] == 'test_zone.example.com.'
def test_format_responses():
    """Verify format_responses() output structure for a crawler execution."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    # NOTE(review): the payload is executed twice and the first result is
    # discarded — looks like a leftover; confirm whether one call suffices.
    crawler.execute(payloads.get_account_aliases)
    execution = crawler.execute(payloads.get_account_aliases)
    execution_responses = format_responses(execution)
    print(yamlfmt(execution_responses))
    assert isinstance(execution_responses, list)
    for response in execution_responses:
        assert 'Account' in response
        assert 'Regions' in response
def test_format_responses():
    """Verify format_responses() output structure for a crawler execution."""
    MockOrganization().simple()
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    # NOTE(review): the payload is executed twice and the first result is
    # discarded — looks like a leftover; confirm whether one call suffices.
    crawler.execute(payload.get_mock_account_alias)
    execution = crawler.execute(payload.get_mock_account_alias)
    execution_responses = format_responses(execution)
    print(yamlfmt(execution_responses))
    assert isinstance(execution_responses, list)
    for response in execution_responses:
        assert 'Account' in response
        assert 'Regions' in response
def test_config_describe_recorder_status():
    """Check config_describe_recorder_status() sees a mock recorder."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    client = boto3.client('config', region_name=region, **account.credentials)
    recorder = {
        'name': 'config_test',
        'roleARN': 'config_test',
    }
    client.put_configuration_recorder(ConfigurationRecorder=recorder)
    response = payloads.config_describe_recorder_status(region, account)
    assert response['ConfigurationRecordersStatus'][0]['name'] == 'config_test'
def test_list_hosted_zones():
    """Check route53.list_hosted_zones() against a mock hosted zone."""
    MockOrganization().simple()
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    client = boto3.client('route53', region_name=region, **account.credentials)
    client.create_hosted_zone(
        Name='test_zone.example.com',
        CallerReference='a_unique_string',
    )
    response = route53.list_hosted_zones(region, account)
    # route53 reports zone names with a trailing dot
    assert response['HostedZones'][0]['Name'] == 'test_zone.example.com.'
def test_make_delivery_channel_topic():
    """Check make_delivery_channel_topic() creates an SNS topic with policy."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    topic = payloads.make_delivery_channel_topic(region, account, 'config_test')
    # NOTE(review): isinstance(topic, object) is vacuously true for any value.
    assert isinstance(topic, object)
    # topic name is the final ':'-separated field of the ARN
    assert topic.arn.rpartition(':')[2] == 'config_test'
    assert isinstance(topic.attributes['Policy'], str)
    sns_policy_document = json.loads(topic.attributes['Policy'])
    assert sns_policy_document['Statement'][0]['Sid'] == 'AWSConfigSNSPolicy'
def test_make_delivery_channel_bucket():
    """Check make_delivery_channel_bucket() creates an S3 bucket with policy."""
    org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC)
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    bucket = payloads.make_delivery_channel_bucket(region, account, 'config_test')
    # NOTE(review): isinstance(bucket, object) is vacuously true for any value.
    assert isinstance(bucket, object)
    assert bucket.name == 'config_test'
    assert isinstance(bucket.Policy().policy, str)
    policy_document = json.loads(bucket.Policy().policy)
    # the delivery-channel bucket policy must carry both AWS Config sids
    assert policy_document['Statement'][0]['Sid'] == 'AWSConfigBucketPermissionsCheck'
    assert policy_document['Statement'][1]['Sid'] == 'AWSConfigBucketDelivery'
def main(master_role, account_role, regions, accounts, service, payload_file, payload, payload_arg):
    """
    Arguments:

    \b
    PAYLOAD         Name of the payload function to run in each account
    PAYLOAD_ARG     The payload function argument(s) if any

    Orgcrawler attempts to resolve payload function name from $PYTHON_PATH

    Examples:

    \b
    orgcrawler -h
    orgcrawler -r OrgMasterRole orgcrawler.payloads.list_buckets
    orgcrawler -r OrgMasterRole --account-role S3Admin orgcrawler.payloads.list_buckets
    orgcrawler -r OrgMasterRole --service codecommit -f ~/my_payloads.py list_cc_repositories
    orgcrawler -r OrgMasterRole --service iam orgcrawler.payloads.get_account_aliases
    orgcrawler -r OrgMasterRole --accounts app-test,app-prod \\
        --regions us-east-1,us-west-2 orgcrawler.payloads.config_describe_rules
    """
    # Translate CLI options into setup_crawler() keyword arguments.
    crawler_args = {}
    if accounts:
        crawler_args['accounts'] = accounts.split(',')
    # --service wins over --regions when both are given
    if service:
        crawler_args['regions'] = regions_for_service(service)
    elif regions:
        crawler_args['regions'] = regions.split(',')
    if account_role:
        crawler_args['account_access_role'] = account_role
    # Resolve the payload function either from a file or from a dotted path.
    if payload_file:
        payload = get_payload_function_from_file(payload_file, payload)
    else:
        payload = get_payload_function_from_string(payload)
    crawler = setup_crawler(master_role, **crawler_args)
    execution = crawler.execute(payload, *payload_arg)
    click.echo(jsonfmt(format_responses(execution)))
def test_create_list_delete_buckets():
    """Exercise s3 create/list/delete bucket payloads, including error paths."""
    MockOrganization().simple()
    crawler = setup_crawler(ORG_ACCESS_ROLE)
    account = crawler.accounts[0]
    region = crawler.regions[0]
    bucket_name = 'test_bucket' + '-' + account.id + '-' + region

    # create: dryrun by default, then for real, then confirm via list
    response = s3.create_bucket(region, account, 'test_bucket')
    assert 'Dryrun' in response
    response = s3.create_bucket(region, account, 'test_bucket', dryrun=False)
    assert response['CreateBucketOperation']['BucketName'] == bucket_name
    assert response['CreateBucketOperation']['Succeeded'] is True
    assert response['CreateBucketOperation']['HTTPStatusCode'] == 200
    response = s3.list_buckets(region, account)
    assert response['Buckets'][0] == bucket_name

    # delete: dryrun by default, then for real
    response = s3.delete_bucket(region, account, 'test_bucket')
    assert 'Dryrun' in response
    response = s3.delete_bucket(region, account, 'test_bucket', dryrun=False)
    assert response['DeleteBucketOperation']['BucketName'] == bucket_name
    assert response['DeleteBucketOperation']['Succeeded'] is True
    assert response['DeleteBucketOperation']['HTTPStatusCode'] == 204

    # edge case: us-east-1 takes no LocationConstraint
    response = s3.create_bucket('us-east-1', account, 'test_bucket', dryrun=False)
    assert response['CreateBucketOperation']['Succeeded'] is True

    # error path: creating the same bucket twice reports BucketAlreadyExists
    response = s3.create_bucket(region, account, 'test_bucket', dryrun=False)
    response = s3.create_bucket(region, account, 'test_bucket', dryrun=False)
    assert response['CreateBucketOperation']['Succeeded'] is False
    assert response['CreateBucketOperation']['ErrorCode'] == 'BucketAlreadyExists'

    # error path: deleting twice reports NoSuchBucket
    response = s3.delete_bucket(region, account, 'test_bucket', dryrun=False)
    response = s3.delete_bucket(region, account, 'test_bucket', dryrun=False)
    assert response['DeleteBucketOperation']['Succeeded'] is False
    assert response['DeleteBucketOperation']['ErrorCode'] == 'NoSuchBucket'
def main(master_role, account_role, regions, accounts, service, payload_file, payload, payload_arg):
    '''
    Where 'PAYLOAD' is name of the payload function to run in each account,
    and 'PAYLOAD_ARG' is, you guessed it, any payload function argument(s).

    Orgcrawler attempts to resolve payload function name from $PYTHON_PATH
    '''
    # Translate CLI options into setup_crawler() keyword arguments.
    crawler_args = {}
    if accounts:
        crawler_args['accounts'] = accounts.split(',')
    # --service wins over --regions when both are given
    if service:
        crawler_args['regions'] = regions_for_service(service)
    elif regions:
        crawler_args['regions'] = regions.split(',')
    if account_role:
        crawler_args['account_access_role'] = account_role
    # Resolve the payload function either from a file or from a dotted path.
    if payload_file:
        payload = get_payload_function_from_file(payload_file, payload)
    else:
        payload = get_payload_function_from_string(payload)
    crawler = setup_crawler(master_role, **crawler_args)
    execution = crawler.execute(payload, *payload_arg)
    click.echo(jsonfmt(format_responses(execution)))
def main(master_role, aggregation_account, reporting_account, bucket_name, spec_file):
    """Collect aggregate config-rule compliance data and upload it to S3.

    Reads the in-scope rule spec from spec_file, queries the config
    aggregator in aggregation_account, and writes one JSON document per
    in-scope rule result to a dated key in an S3 bucket owned by
    reporting_account (defaulting to aggregation_account).
    """
    if not reporting_account:
        reporting_account = aggregation_account
    print(master_role, aggregation_account, reporting_account, bucket_name, spec_file)

    # parse spec file
    spec = yaml.safe_load(spec_file.read())

    # get account names and alias using orgcrawler
    crawler = setup_crawler(
        master_role,
        regions=DEFAULT_REGION,
    )

    # locate the configuration aggregator in the aggregation account
    account = crawler.org.get_account(aggregation_account)
    boto_config = botocore.client.Config(
        connect_timeout=2,
        read_timeout=10,
        retries={"max_attempts": 2},
    )
    client = boto3.client(
        'config',
        config=boto_config,
        region_name=DEFAULT_REGION,
        **account.credentials
    )
    response = client.describe_configuration_aggregators()
    # take the first aggregator found, if any
    aggregator_name = next(
        (agg['ConfigurationAggregatorName']
         for agg in response['ConfigurationAggregators']),
        None,
    )
    if aggregator_name is None:
        sys.exit('could not determine ConfigurationAggregatorName')
    compliance_generator = paginate(
        client,
        client.describe_aggregate_compliance_by_config_rules,
        ConfigurationAggregatorName=aggregator_name,
    )

    # assemble config rule compliance data, one JSON document per line
    text_stream = io.StringIO()
    for item in compliance_generator:
        rule_name = truncate_sechub_rule_name(item['ConfigRuleName'])
        if is_in_scope(spec, rule_name):
            compliance_data = dict(
                config_rule_name=rule_name,
                compliance_type=item['Compliance']['ComplianceType'],
                non_compliant_resource_count=get_resource_count(item),
                account_id=item['AccountId'],
                account_name=crawler.org.get_account_name_by_id(item['AccountId']),
                region=item['AwsRegion'],
                timestamp=timestamp(),
            )
            text_stream.write(json.dumps(compliance_data) + '\n')

    # upload to s3 under a year/month/day prefix.  Capture "now" once so the
    # three date components cannot straddle a midnight boundary (the old code
    # called datetime.now() three separate times).
    today = datetime.now()
    obj_path = 'aggregate_compliance_by_config_rules/{}/{}/{}/compliance_data.json'.format(
        today.year, today.month, today.day)
    print(obj_path)
    account = crawler.org.get_account(reporting_account)
    bucket_name = bucket_name + '-' + account.id
    print(bucket_name)
    s3_client = boto3.client('s3', region_name=DEFAULT_REGION, **account.credentials)
    try:
        s3_client.create_bucket(
            ACL='private',
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': DEFAULT_REGION},
        )
    except s3_client.exceptions.BucketAlreadyOwnedByYou:
        # bucket already exists and we own it — reuse it
        pass
    s3_client.put_object(
        Bucket=bucket_name,
        Key=obj_path,
        Body=text_stream.getvalue(),
    )
def test_enable_config(): org_id, root_id = build_mock_org(SIMPLE_ORG_SPEC) crawler = setup_crawler(ORG_ACCESS_ROLE) account = crawler.accounts[0] region = crawler.regions[0]