def test_reserve_hostclass_ip_addresses(self, meta_network_mock, boto3_resource_mock,
                                        boto3_client_mock, config_mock, sleep_mock,
                                        gateways_mock, sns_mock, endpoints_mock, rds_mock):
    """Test hostclass IP addresses are being reserved during VPC creation"""
    # Minimal auto-VPC environment config: every subnet CIDR is carved out dynamically
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5'
        }
    })

    # pylint: disable=C0103
    def _create_vpc_mock(CidrBlock):
        # Echo the requested CIDR back in the shape boto3's create_vpc returns
        return {
            'Vpc': {
                'CidrBlock': CidrBlock,
                'VpcId': 'mock_vpc_id',
                'DhcpOptionsId': 'mock_dhcp_options_id'
            }
        }

    client_mock = MagicMock()
    client_mock.create_vpc.side_effect = _create_vpc_mock
    client_mock.get_all_zones.return_value = [MagicMock()]
    client_mock.describe_dhcp_options.return_value = {
        'DhcpOptions': [MagicMock()]
    }
    boto3_client_mock.return_value = client_mock

    # Single shared meta-network mock records all get_interface calls
    network_mock = MagicMock()
    meta_network_mock.return_value = network_mock

    DiscoVPC('auto-vpc', 'auto-vpc-type', aws_config=get_mock_config())

    # Every hostclass section that declares a static ip_address should have had
    # that address reserved on the meta network during VPC creation
    expected_calls = []
    default_config = get_default_config_dict()
    for section in default_config:
        if section.startswith("mhc") and default_config[section].get("ip_address"):
            expected_calls.append(call(default_config[section].get("ip_address")))
    network_mock.get_interface.assert_has_calls(expected_calls)
def setUp(self):
    """Create a DiscoLogMetrics with a mocked CloudWatch Logs client and config"""
    self.log_metrics = DiscoLogMetrics('test-env')
    self.log_metrics.logs = MagicMock()
    # One log-metric filter configured for hostclass mhcdummy
    config_mock = PropertyMock(return_value=get_mock_config({
        'mhcdummy.metric_name': {
            'log_file': '/error_log',
            'filter_pattern': 'error',
            'metric_value': 1
        }
    }))
    type(self.log_metrics).config = config_mock

    # pylint: disable=C0103
    def _describe_log_groups(logGroupNamePrefix):
        if logGroupNamePrefix == 'test-env/':
            # getting all log metrics in env
            return {'logGroups': [{'logGroupName': 'test-env/mhcdummy/info_log'},
                                  {'logGroupName': 'test-env/mhcbanana/warning_log'}]}
        # getting all log metrics for hostclass
        return {'logGroups': [{'logGroupName': 'test-env/mhcdummy/info_log'}]}

    # pylint: disable=C0103
    def _describe_metric_filters(logGroupName):
        # Map each known log group to its single metric filter
        if logGroupName == 'test-env/mhcdummy/info_log':
            return {'metricFilters': [{'filterName': 'mhcdummy_metric'}]}
        elif logGroupName == 'test-env/mhcbanana/warning_log':
            return {'metricFilters': [{'filterName': 'mhcbanana_metric'}]}

    self.log_metrics.logs.describe_log_groups.side_effect = _describe_log_groups
    self.log_metrics.logs.describe_metric_filters.side_effect = _describe_metric_filters
def test_update_peering_connections(self, config_mock, create_peering_route_mock):
    """ Verify new peering connections are created properly """
    # One configured peering between the intranet networks of two sandbox VPCs
    config_mock.return_value = get_mock_config({
        'peerings': {
            'connection_1': 'mock-vpc-1:sandbox/intranet mock-vpc-2:sandbox/intranet'
        }
    })
    # End setting up test

    # Calling method under test
    self.disco_vpc_peerings.update_peering_connections(self.disco_vpc1)

    # Asserting correct behavior
    peeerings = self.client.describe_vpc_peering_connections().get('VpcPeeringConnections')
    self.assertEqual(1, len(peeerings))
    peering_id = peeerings[0]['VpcPeeringConnectionId']
    # vpc1 requested the peering, vpc2 accepted it
    self.assertEqual(self.disco_vpc1.get_vpc_id(), peeerings[0]['RequesterVpcInfo']['VpcId'])
    self.assertEqual(self.disco_vpc2.get_vpc_id(), peeerings[0]['AccepterVpcInfo']['VpcId'])
    # create_peering_route should have been called twice, once for each VPC
    create_peering_route_mock.assert_called_with(peering_id, '10.101.0.0/20')
    self.assertEqual(2, create_peering_route_mock.call_count)
def setUp(self):
    """Build an RDS helper against mocked AWS clients and a two-database config"""
    with patch('disco_aws_automation.disco_rds.DiscoVPCSecurityGroupRules',
               return_value=_get_vpc_sg_rules_mock()):
        self.rds = RDS(TEST_ENV_NAME, 'testdbname', MOCK_SG_GROUP_ID,
                       ['mock_subnet_id'], 'example.com')
    self.rds.client = MagicMock()
    self.rds.config_rds = get_mock_config({
        # Oracle database with default backup/maintenance windows
        'some-env-db-name': {
            'engine': 'oracle',
            'allocated_storage': '100',
            'db_instance_class': 'db.m4.2xlarge',
            'engine_version': '12.1.0.2.v2',
            'master_username': '******',
            'product_line': 'mock_productline'
        },
        # Same database but with explicit backup and maintenance windows
        'some-env-db-name-with-windows': {
            'engine': 'oracle',
            'allocated_storage': '100',
            'db_instance_class': 'db.m4.2xlarge',
            'engine_version': '12.1.0.2.v2',
            'master_username': '******',
            'preferred_backup_window': MOCK_BACKUP_WINDOW,
            'preferred_maintenance_window': MOCK_MAINTENANCE_WINDOW,
            'product_line': 'mock_productline'
        }
    })
def test_clone_uses_latest_snapshot(self, bucket_mock, r53_mock, vpc_mock):
    """test that an RDS clone uses the latest available snapshot"""
    self.rds._get_db_instance = MagicMock(return_value=None)
    self.rds.config_rds = get_mock_config({
        'some-env-db-name': {
            'engine': 'oracle',
            'allocated_storage': '100',
            'db_instance_class': 'db.m4.2xlarge',
            'engine_version': '12.1.0.2.v2',
            'master_username': '******',
            'product_line': 'mock_productline'
        }
    })
    # Two snapshots; the second one is a day newer and must win
    snapshots = [
        {'DBSnapshotIdentifier': 'foo-snapshot',
         'SnapshotCreateTime': datetime.datetime(2016, 1, 13)},
        {'DBSnapshotIdentifier': 'foo-snapshot2',
         'SnapshotCreateTime': datetime.datetime(2016, 1, 14)},
    ]
    self.rds.client.describe_db_snapshots.return_value = {'DBSnapshots': snapshots}

    self.rds.clone('some-env', 'db-name')

    _, restore_kwargs = self.rds.client.restore_db_instance_from_db_snapshot.call_args
    self.assertEqual('foo-snapshot2', restore_kwargs['DBSnapshotIdentifier'])
def test_provision_hc_with_chaos_using_config(self, mock_config, **kwargs):
    """ Provision creates the proper launch configuration and autoscaling group with chaos from config """
    # Enable chaos via the hostclass config rather than the provision() arguments
    config_dict = get_default_config_dict()
    config_dict["mhcunittest"]["chaos"] = "True"
    aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME,
                   log_metrics=MagicMock())
    mock_ami = self._get_image_mock(aws)
    aws.update_elb = MagicMock(return_value=None)
    aws.discogroup.elastigroup.spotinst_client = MagicMock()

    # Patch out every AWS round-trip that provision() performs
    with patch("disco_aws_automation.DiscoAWS.get_meta_network",
               return_value=_get_meta_network_mock()):
        with patch("boto.ec2.connection.EC2Connection.get_all_snapshots", return_value=[]):
            with patch("disco_aws_automation.DiscoAWS.create_scaling_schedule",
                       return_value=None):
                with patch("boto.ec2.autoscale.AutoScaleConnection.create_or_update_tags",
                           return_value=None):
                    metadata = aws.provision(ami=mock_ami, hostclass="mhcunittest",
                                             owner="unittestuser",
                                             min_size=1, desired_size=1, max_size=1)

    # chaos must be reported even though provision() was not passed it explicitly
    self.assertEqual(metadata["hostclass"], "mhcunittest")
    self.assertFalse(metadata["no_destroy"])
    self.assertTrue(metadata["chaos"])
    _lc = aws.discogroup.get_configs()[0]
    self.assertRegexpMatches(_lc.name, r".*_mhcunittest_[0-9]*")
    self.assertEqual(_lc.image_id, mock_ami.id)
    self.assertTrue(aws.discogroup.get_existing_group(hostclass="mhcunittest"))
    _ag = aws.discogroup.get_existing_groups()[0]
    self.assertRegexpMatches(_ag['name'], r"unittestenv_mhcunittest_[0-9]*")
    self.assertEqual(_ag['min_size'], 1)
    self.assertEqual(_ag['max_size'], 1)
    self.assertEqual(_ag['desired_capacity'], 1)
def _get_vpc_mock(random_subnet_mock=None, meta_network_mock=None, boto3_resource_mock=None, boto3_client_mock=None, config_mock=None, gateways_mock=None, sns_mock=None, endpoints_mock=None, rds_mock=None): config_mock.return_value = get_mock_config({ 'envtype:auto-vpc-type': { 'ip_space': '10.0.0.0/24', 'vpc_cidr_size': '26', 'intranet_cidr': 'auto', 'tunnel_cidr': 'auto', 'dmz_cidr': 'auto', 'maintenance_cidr': 'auto', 'ntp_server': '10.0.0.5' } }) # pylint: disable=C0103 def _create_vpc_mock(CidrBlock): return {'Vpc': {'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id', 'DhcpOptionsId': 'mock_dhcp_options_id'}} random_subnet_mock.return_value = IPNetwork('10.0.0.0/26') client_mock = MagicMock() client_mock.create_vpc.side_effect = _create_vpc_mock client_mock.describe_dhcp_options.return_value = {'DhcpOptions': [MagicMock()]} boto3_client_mock.return_value = client_mock ret = DiscoVPC(TEST_ENV_NAME, 'auto-vpc-type') return ret
def test_update_peering_connections(self, config_mock, create_peering_route_mock):
    """ Verify new peering connections are created properly """
    # One configured peering between the intranet networks of two sandbox VPCs
    config_mock.return_value = get_mock_config({
        'peerings': {
            'connection_1': 'mock-vpc-1:sandbox/intranet mock-vpc-2:sandbox/intranet'
        }
    })
    # End setting up test

    # Calling method under test
    self.disco_vpc_peerings.update_peering_connections(self.disco_vpc1)

    # Asserting correct behavior
    peeerings = self.client.describe_vpc_peering_connections().get(
        'VpcPeeringConnections')
    self.assertEqual(1, len(peeerings))
    peering_id = peeerings[0]['VpcPeeringConnectionId']
    # vpc1 requested the peering, vpc2 accepted it
    self.assertEqual(self.disco_vpc1.get_vpc_id(), peeerings[0]['RequesterVpcInfo']['VpcId'])
    self.assertEqual(self.disco_vpc2.get_vpc_id(), peeerings[0]['AccepterVpcInfo']['VpcId'])
    # create_peering_route should have been called twice, once for each VPC
    create_peering_route_mock.assert_called_with(peering_id, '10.101.0.0/20')
    self.assertEqual(2, create_peering_route_mock.call_count)
def setUp(self):
    """Build an RDS helper against mocked AWS clients and a two-database config"""
    with patch('disco_aws_automation.disco_rds.DiscoVPCSecurityGroupRules',
               return_value=_get_vpc_sg_rules_mock()):
        self.rds = RDS(TEST_ENV_NAME, 'testdbname', MOCK_SG_GROUP_ID,
                       ['mock_subnet_id'], 'example.com')
    self.rds.client = MagicMock()
    self.rds.config_rds = get_mock_config({
        # Oracle database with default backup/maintenance windows
        'some-env-db-name': {
            'engine': 'oracle',
            'allocated_storage': '100',
            'db_instance_class': 'db.m4.2xlarge',
            'engine_version': '12.1.0.2.v2',
            'master_username': '******',
            'product_line': 'mock_productline'
        },
        # Same database but with explicit backup and maintenance windows
        'some-env-db-name-with-windows': {
            'engine': 'oracle',
            'allocated_storage': '100',
            'db_instance_class': 'db.m4.2xlarge',
            'engine_version': '12.1.0.2.v2',
            'master_username': '******',
            'preferred_backup_window': MOCK_BACKUP_WINDOW,
            'preferred_maintenance_window': MOCK_MAINTENANCE_WINDOW,
            'product_line': 'mock_productline'
        }
    })
def setUp(self):
    """Create a DiscoChaos instance with a mocked DiscoAWS backend"""
    mock_config = get_mock_config(get_default_config_dict())
    self.chaos = DiscoChaos(config=mock_config,
                            environment_name=TEST_ENV_NAME,
                            level=25.0, retainage=30.0)
    self.chaos._disco_aws = create_autospec(DiscoAWS)
def test_create_auto_vpc(self, meta_network_mock, boto3_resource_mock, boto3_client_mock, config_mock, sleep_mock, gateways_mock, sns_mock, endpoints_mock, rds_mock): """Test creating a VPC with a dynamic ip range""" # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored config_mock.return_value = get_mock_config({ 'envtype:auto-vpc-type': { 'ip_space': '10.0.0.0/24', 'vpc_cidr_size': '26', 'intranet_cidr': 'auto', 'tunnel_cidr': 'auto', 'dmz_cidr': 'auto', 'maintenance_cidr': 'auto', 'ntp_server': '10.0.0.5' } }) # pylint: disable=C0103 def _create_vpc_mock(CidrBlock): return {'Vpc': {'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id', 'DhcpOptionsId': 'mock_dhcp_options_id'}} client_mock = MagicMock() client_mock.create_vpc.side_effect = _create_vpc_mock client_mock.get_all_zones.return_value = [MagicMock()] client_mock.describe_dhcp_options.return_value = {'DhcpOptions': [MagicMock()]} boto3_client_mock.return_value = client_mock auto_vpc = DiscoVPC('auto-vpc', 'auto-vpc-type') possible_vpcs = ['10.0.0.0/26', '10.0.0.64/26', '10.0.0.128/26', '10.0.0.192/26'] self.assertIn(str(auto_vpc.vpc['CidrBlock']), possible_vpcs)
def test_clone_uses_latest_snapshot(self, bucket_mock, r53_mock, vpc_mock): """test that an RDS clone uses the latest available snapshot""" self.rds._get_db_instance = MagicMock(return_value=None) self.rds.config_rds = get_mock_config({ 'some-env-db-name': { 'engine': 'oracle', 'allocated_storage': '100', 'db_instance_class': 'db.m4.2xlarge', 'engine_version': '12.1.0.2.v2', 'master_username': '******', 'product_line': 'mock_productline' } }) self.rds.client.describe_db_snapshots.return_value = { 'DBSnapshots': [{ 'DBSnapshotIdentifier': 'foo-snapshot', 'SnapshotCreateTime': datetime.datetime(2016, 1, 13) }, { 'DBSnapshotIdentifier': 'foo-snapshot2', 'SnapshotCreateTime': datetime.datetime(2016, 1, 14) }] } self.rds.clone('some-env', 'db-name') actual = self.rds.client.restore_db_instance_from_db_snapshot.call_args[1]['DBSnapshotIdentifier'] self.assertEqual('foo-snapshot2', actual)
def test_create_vpc_with_custom_tags(self, boto3_resource_mock, boto3_client_mock, config_mock, endpoints_mock, sns_mock, rds_mock): """Test creating a VPC with a dynamic ip range and tags""" # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored config_mock.return_value = get_mock_config({ 'envtype:auto-vpc-type': { 'ip_space': '10.0.0.0/24', 'vpc_cidr_size': '26', 'intranet_cidr': 'auto', 'tunnel_cidr': 'auto', 'dmz_cidr': 'auto', 'maintenance_cidr': 'auto', 'ntp_server': '10.0.0.5' } }) # pylint: disable=C0103 def _create_vpc_mock(CidrBlock): return {'Vpc': {'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id', 'DhcpOptionsId': 'mock_dhcp_options_id'}} client_mock = MagicMock() client_mock.create_vpc.side_effect = _create_vpc_mock boto3_client_mock.return_value = client_mock resource_mock = MagicMock() resource_mock.Vpc.create_tags.return_value = [] boto3_resource_mock.return_value = resource_mock client_mock.describe_vpn_gateways.return_value = {'VpnGateways': []} my_tags_options = [{'Value': 'astronauts', 'Key': 'productline'}, {'Value': 'tag_value', 'Key': 'mytag'}] DiscoVPC._get_vpc_cidr = MagicMock() DiscoVPC._get_vpc_cidr.return_value = '10.0.0.0/26' with patch("disco_aws_automation.DiscoVPC._create_new_meta_networks", return_value=MagicMock(return_value={})): with patch("disco_aws_automation.DiscoVPC._update_dhcp_options", return_value=None): # The expect list of tag dictionaries expected_vpc_tags = [{'Value': 'auto-vpc', 'Key': 'Name'}, {'Value': 'auto-vpc-type', 'Key': 'type'}, {'Value': 'ANY', 'Key': 'create_date'}, {'Value': 'astronauts', 'Key': 'productline'}, {'Value': 'tag_value', 'Key': 'mytag'}] DiscoVPC('auto-vpc', 'auto-vpc-type', vpc_tags=my_tags_options) # Get the create_tags argument call_args_tags = resource_mock.Vpc.return_value.create_tags.call_args[1] # Verify Option Name self.assertEqual(['Tags'], call_args_tags.keys()) call_tags_dict = call_args_tags['Tags'] # Verify the number of tag Dictionaries in the list 
self.assertEqual(5, len(call_tags_dict)) # Verify each tag options for tag_option in call_tags_dict: if tag_option['Key'] == 'create_date': tag_option['Value'] = 'ANY' self.assertIn(tag_option, expected_vpc_tags)
def test_create_meta_networks_static_dynamic(self, meta_network_mock, config_mock,
                                             endpoints_mock):
    """Test creating meta networks with a mix of static and dynamic ip ranges"""
    vpc_mock = {'CidrBlock': '10.0.0.0/28',
                'VpcId': 'mock_vpc_id',
                'DhcpOptionsId': 'mock_dhcp_options_id'}
    # dmz is pinned to a static /31; the other three networks are auto-assigned
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'vpc_cidr': '10.0.0.0/28',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': '10.0.0.4/31',
            'maintenance_cidr': 'auto'
        }
    })

    def _create_meta_network_mock(network_name, vpc, cidr):
        # Record the constructor arguments so the assertions below can inspect them
        ret = MagicMock()
        ret.name = network_name
        ret.vpc = vpc
        ret.network_cidr = cidr
        return ret

    meta_network_mock.side_effect = _create_meta_network_mock

    auto_vpc = DiscoVPC('auto-vpc', 'auto-vpc-type', vpc_mock)
    meta_networks = auto_vpc._create_new_meta_networks()
    self.assertItemsEqual(['intranet', 'tunnel', 'dmz', 'maintenance'], meta_networks.keys())
    # The dynamic /30 ranges must pack around the statically assigned 10.0.0.4/31
    expected_ip_ranges = ['10.0.0.0/30', '10.0.0.4/31', '10.0.0.8/30', '10.0.0.12/30']
    actual_ip_ranges = [str(meta_network.network_cidr)
                        for meta_network in meta_networks.values()]
    self.assertItemsEqual(actual_ip_ranges, expected_ip_ranges)
def test_update_nat_gateways_and_routes(self, config_mock, meta_network_mock):
    """ Verify NAT gateways and the routes to them are created properly """
    # tunnel gets three explicit EIP-backed NAT gateways; intranet routes through tunnel
    config_mock.return_value = get_mock_config({
        'envtype:sandbox': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5',
            'tunnel_nat_gateways': '10.1.0.4,10.1.0.5,10.1.0.6',
            'intranet_nat_gateways': 'auto',
            'nat_gateway_routes': 'intranet/tunnel'
        }
    })

    network_intranet_mock = MagicMock()
    network_dmz_mock = MagicMock()
    network_maintenance_mock = MagicMock()
    network_tunnel_mock = MagicMock()

    def _meta_network_mock(name, vpc, network_cidr=None, boto3_connection=None):
        # Hand back the per-network mock that matches the requested name
        if name == 'intranet':
            ret = network_intranet_mock
        elif name == 'dmz':
            ret = network_dmz_mock
        elif name == 'maintenance':
            ret = network_maintenance_mock
        elif name == 'tunnel':
            ret = network_tunnel_mock
        else:
            return None
        ret.name = name
        ret.vpc = vpc
        ret.subnet_ids = ["subnet-cafe", "subnet-beef"]
        ret.get_nat_gateway_metanetwork.return_value = None
        if network_cidr:
            ret.network_cidr = IPNetwork(network_cidr)
        else:
            ret.network_cidr = IPNetwork('10.0.0.0/26')
        return ret

    meta_network_mock.side_effect = _meta_network_mock
    # End of setting up test

    # Calling method under test
    self.disco_vpc_gateways.update_nat_gateways_and_routes()

    # Verifying correct behavior
    network_intranet_mock.upsert_nat_gateway_route.assert_called_once_with(network_tunnel_mock)
    # One NAT gateway allocation per address listed in tunnel_nat_gateways
    network_tunnel_mock.add_nat_gateways.assert_called_once_with(allocation_ids=[
        self.disco_vpc_gateways.eip.find_eip_address('eip').allocation_id,
        self.disco_vpc_gateways.eip.find_eip_address('eip').allocation_id,
        self.disco_vpc_gateways.eip.find_eip_address('eip').allocation_id
    ])
def test_socify_helper_constr_no_soc_config(self):
    """Test SocifyHelper Constructor when the socify section is missing from the config"""
    helper = SocifyHelper("AL-1102", False, "ExampleEvent",
                          env="test_env", config=get_mock_config({}))
    # Without a socify section the helper never defines _socify_url
    self.assertFalse(hasattr(helper, '_socify_url'))
def test_eligible_instances_retainage_zero(self):
    """Test that retainage of zero retains nothing"""
    self.chaos = DiscoChaos(config=get_mock_config(get_default_config_dict()),
                            environment_name=TEST_ENV_NAME,
                            level=25.0, retainage=0.0)
    self.chaos._groups = [self._mock_group()]
    # With zero retainage every instance in the group is fair game
    eligible = self.chaos._termination_eligible_instances()
    self.assertEqual(3, len(eligible))
def test_socify_helper_constr_no_soc_config(self):
    """Test SocifyHelper Constructor when the socify section is missing from the config"""
    helper = SocifyHelper("AL-1102", False, "ExampleEvent",
                          env="test_env", config=get_mock_config({}))
    # Without a socify section the helper never defines _socify_url
    self.assertFalse(hasattr(helper, '_socify_url'))
def setUp(self):
    """Create a SpotinstClient against a fake environment for each test"""
    aws_config = get_mock_config(MOCK_AWS_CONFIG_DEFINITION)
    self.spotinst_client = SpotinstClient(token="foo",
                                          environment_name="fakeenvironment",
                                          config_aws=aws_config)
def test_create_userdata_without_spotinst(self, **kwargs):
    """ create_userdata doesn't set 'spotinst' key """
    aws = DiscoAWS(config=get_mock_config(get_default_config_dict()),
                   environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser",
                                    is_spotinst=False)
    # Non-spotinst hosts are flagged with the string "0"
    self.assertEqual("0", user_data["is_spotinst"])
def test_create_userdata_without_spotinst(self, **kwargs):
    """ create_userdata doesn't set 'spotinst' key """
    aws = DiscoAWS(config=get_mock_config(get_default_config_dict()),
                   environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser",
                                    is_spotinst=False)
    # Non-spotinst hosts are flagged with the string "0"
    self.assertEqual("0", user_data["is_spotinst"])
def test_eligible_instances_retainage_zero(self):
    """Test that retainage of zero retains nothing"""
    self.chaos = DiscoChaos(config=get_mock_config(get_default_config_dict()),
                            environment_name=TEST_ENV_NAME,
                            level=25.0, retainage=0.0)
    self.chaos._groups = [self._mock_group()]
    # With zero retainage every instance in the group is fair game
    eligible = self.chaos._termination_eligible_instances()
    self.assertEqual(3, len(eligible))
def test_create_userdata_with_zookeeper(self, **kwargs):
    """ create_userdata sets 'zookeepers' key """
    aws = DiscoAWS(config=get_mock_config(get_default_config_dict()),
                   environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser")
    # The zookeeper list is a JSON array with escaped quotes for shell embedding
    expected = "[\\\"mhczookeeper-{}.example.com:2181\\\"]".format(aws.vpc.environment_name)
    self.assertEqual(expected, user_data["zookeepers"])
def test_create_userdata_with_zookeeper(self, **kwargs):
    """ create_userdata sets 'zookeepers' key """
    aws = DiscoAWS(config=get_mock_config(get_default_config_dict()),
                   environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser")
    # The zookeeper list is a JSON array with escaped quotes for shell embedding
    expected = "[\\\"mhczookeeper-{}.example.com:2181\\\"]".format(aws.vpc.environment_name)
    self.assertEqual(expected, user_data["zookeepers"])
def setUp(self):
    """Create a SocifyHelper pointed at a mock socify endpoint"""
    config = get_mock_config({
        'socify': {'socify_baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    self._soc_helper = SocifyHelper("AL-1102", False, "ExampleEvent",
                                    env="test_env", config=config)
    self._soc_helper.ami_id = "ami_12345"
def test_create_userdata_with_eip(self, **kwargs):
    """ create_userdata sets 'eip' key when an EIP is required """
    eip = "54.201.250.76"
    config_dict = get_default_config_dict()
    config_dict["mhcunittest"]["eip"] = eip
    aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser")
    # The configured elastic IP must be passed through to the userdata verbatim
    self.assertEqual(eip, user_data["eip"])
def test_create_userdata_with_eip(self, **kwargs):
    """ create_userdata sets 'eip' key when an EIP is required """
    eip = "54.201.250.76"
    config_dict = get_default_config_dict()
    config_dict["mhcunittest"]["eip"] = eip
    aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME)
    user_data = aws.create_userdata(hostclass="mhcunittest", owner="unittestuser")
    # The configured elastic IP must be passed through to the userdata verbatim
    self.assertEqual(eip, user_data["eip"])
def setUp(self):
    """Create a SocifyHelper pointed at a mock socify endpoint"""
    config = get_mock_config({
        'socify': {'socify_baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    self._soc_helper = SocifyHelper("AL-1102", False, "ExampleEvent",
                                    env="test_env", config=config)
    self._soc_helper.ami_id = "ami_12345"
def test_socify_helper_constr(self):
    """Test SocifyHelper Constructor with valid data"""
    config = get_mock_config({
        'socify': {'socify_baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    helper = SocifyHelper("AL-1102", False, "ExampleEvent", env="test_env", config=config)
    # The configured base URL must be picked up as-is
    self.assertEqual("https://socify-ci.aws.wgen.net/soc", helper._socify_url)
def test_socify_helper_constr_no_soc_baseurl(self):
    """Test SocifyHelper Constructor when the socify base_url is missing from the config"""
    # 'baseurl' is deliberately the wrong key name; the helper expects 'socify_baseurl'
    config = get_mock_config({
        'socify': {'baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    helper = SocifyHelper("AL-1102", False, "ExampleEvent", env="test_env", config=config)
    self.assertFalse(hasattr(helper, '_socify_url'))
def test_reserve_hostclass_ip_addresses(self, meta_network_mock, boto3_resource_mock,
                                        boto3_client_mock, config_mock, sleep_mock,
                                        gateways_mock, sns_mock, endpoints_mock, rds_mock):
    """Test hostclass IP addresses are being reserved during VPC creation"""
    # Minimal auto-VPC environment config: every subnet CIDR is carved out dynamically
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5'
        }
    })

    # pylint: disable=C0103
    def _create_vpc_mock(CidrBlock):
        # Echo the requested CIDR back in the shape boto3's create_vpc returns
        return {'Vpc': {'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id',
                        'DhcpOptionsId': 'mock_dhcp_options_id'}}

    client_mock = MagicMock()
    client_mock.create_vpc.side_effect = _create_vpc_mock
    client_mock.get_all_zones.return_value = [MagicMock()]
    client_mock.describe_dhcp_options.return_value = {'DhcpOptions': [MagicMock()]}
    boto3_client_mock.return_value = client_mock

    # Single shared meta-network mock records all get_interface calls
    network_mock = MagicMock()
    meta_network_mock.return_value = network_mock

    DiscoVPC('auto-vpc', 'auto-vpc-type', aws_config=get_mock_config())

    # Every hostclass section that declares a static ip_address should have had
    # that address reserved on the meta network during VPC creation
    expected_calls = []
    default_config = get_default_config_dict()
    for section in default_config:
        if section.startswith("mhc") and default_config[section].get("ip_address"):
            expected_calls.append(call(default_config[section].get("ip_address")))
    network_mock.get_interface.assert_has_calls(expected_calls)
def test_socify_helper_constr_no_soc_baseurl(self):
    """Test SocifyHelper Constructor when the socify base_url is missing from the config"""
    # 'baseurl' is deliberately the wrong key name; the helper expects 'socify_baseurl'
    config = get_mock_config({
        'socify': {'baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    helper = SocifyHelper("AL-1102", False, "ExampleEvent", env="test_env", config=config)
    self.assertFalse(hasattr(helper, '_socify_url'))
def test_socify_helper_constr(self):
    """Test SocifyHelper Constructor with valid data"""
    config = get_mock_config({
        'socify': {'socify_baseurl': 'https://socify-ci.aws.wgen.net/soc'}
    })
    helper = SocifyHelper("AL-1102", False, "ExampleEvent", env="test_env", config=config)
    # The configured base URL must be picked up as-is
    self.assertEqual("https://socify-ci.aws.wgen.net/soc", helper._socify_url)
def test_get_alarm_config_elb_metric(self):
    """Test DiscoAlarmsConfig get_alarms for ELB metrics"""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    alarms_config.config = get_mock_config({
        'reporting.AWS/ELB.HealthyHostCount.mhcbanana': {
            'threshold_min': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'Minimum',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })
    alarms = alarms_config.get_alarms('mhcbanana')
    self.assertEqual(1, len(alarms))
    # ELB alarms are dimensioned on the load balancer name
    expected_dimensions = {'LoadBalancerName': DiscoELB.get_elb_id('testenv', 'mhcbanana')}
    self.assertEqual(expected_dimensions, alarms[0].dimensions)
def test_create_meta_networks_static_dynamic(self, meta_network_mock, config_mock,
                                             endpoints_mock):
    """Test creating meta networks with a mix of static and dynamic ip ranges"""
    vpc_mock = {
        'CidrBlock': '10.0.0.0/28',
        'VpcId': 'mock_vpc_id',
        'DhcpOptionsId': 'mock_dhcp_options_id'
    }
    # dmz is pinned to a static /31; the other three networks are auto-assigned
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'vpc_cidr': '10.0.0.0/28',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': '10.0.0.4/31',
            'maintenance_cidr': 'auto'
        }
    })

    def _create_meta_network_mock(network_name, vpc, cidr):
        # Record the constructor arguments so the assertions below can inspect them
        ret = MagicMock()
        ret.name = network_name
        ret.vpc = vpc
        ret.network_cidr = cidr
        return ret

    meta_network_mock.side_effect = _create_meta_network_mock

    auto_vpc = DiscoVPC('auto-vpc', 'auto-vpc-type', vpc_mock)
    meta_networks = auto_vpc._create_new_meta_networks()
    self.assertItemsEqual(['intranet', 'tunnel', 'dmz', 'maintenance'], meta_networks.keys())
    # The dynamic /30 ranges must pack around the statically assigned 10.0.0.4/31
    expected_ip_ranges = [
        '10.0.0.0/30', '10.0.0.4/31', '10.0.0.8/30', '10.0.0.12/30'
    ]
    actual_ip_ranges = [
        str(meta_network.network_cidr)
        for meta_network in meta_networks.values()
    ]
    self.assertItemsEqual(actual_ip_ranges, expected_ip_ranges)
def _get_elb_config(self, overrides=None):
    """Return a mock config containing an ELB-enabled mhcelb hostclass.

    Entries in overrides replace the corresponding defaults below.
    """
    config = get_default_config_dict()
    hostclass_settings = {
        "subnet": "intranet",
        "security_group": "intranet",
        "ssh_key_name": "unittestkey",
        "instance_profile_name": "unittestprofile",
        "public_ip": "False",
        "ip_address": None,
        "eip": None,
        "domain_name": "example.com",
        "elb": "yes",
        "elb_health_check_url": "/foo",
        "product_line": "mock_productline"
    }
    hostclass_settings.update(overrides or {})
    config["mhcelb"] = hostclass_settings
    return get_mock_config(config)
def _get_elb_config(self, overrides=None):
    """Return a mock config containing an ELB-enabled mhcelb hostclass.

    Entries in overrides replace the corresponding defaults below.
    """
    config = get_default_config_dict()
    hostclass_settings = {
        "subnet": "intranet",
        "security_group": "intranet",
        "ssh_key_name": "unittestkey",
        "instance_profile_name": "unittestprofile",
        "public_ip": "False",
        "ip_address": None,
        "eip": None,
        "domain_name": "example.com",
        "elb": "yes",
        "elb_health_check_url": "/foo",
        "product_line": "mock_productline"
    }
    hostclass_settings.update(overrides or {})
    config["mhcelb"] = hostclass_settings
    return get_mock_config(config)
def _get_vpc_mock(random_subnet_mock=None, meta_network_mock=None, boto3_resource_mock=None, boto3_client_mock=None, config_mock=None, gateways_mock=None, sns_mock=None, endpoints_mock=None, rds_mock=None): config_mock.return_value = get_mock_config({ 'envtype:auto-vpc-type': { 'ip_space': '10.0.0.0/24', 'vpc_cidr_size': '26', 'intranet_cidr': 'auto', 'tunnel_cidr': 'auto', 'dmz_cidr': 'auto', 'maintenance_cidr': 'auto', 'ntp_server': '10.0.0.5' } }) # pylint: disable=C0103 def _create_vpc_mock(CidrBlock): return { 'Vpc': { 'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id', 'DhcpOptionsId': 'mock_dhcp_options_id' } } random_subnet_mock.return_value = IPNetwork('10.0.0.0/26') client_mock = MagicMock() client_mock.create_vpc.side_effect = _create_vpc_mock client_mock.describe_dhcp_options.return_value = { 'DhcpOptions': [MagicMock()] } boto3_client_mock.return_value = client_mock ret = DiscoVPC(TEST_ENV_NAME, 'auto-vpc-type') return ret
def test_create_auto_vpc(self, meta_network_mock, boto3_resource_mock, boto3_client_mock, config_mock, sleep_mock, gateways_mock, sns_mock, endpoints_mock, rds_mock): """Test creating a VPC with a dynamic ip range""" # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored config_mock.return_value = get_mock_config({ 'envtype:auto-vpc-type': { 'ip_space': '10.0.0.0/24', 'vpc_cidr_size': '26', 'intranet_cidr': 'auto', 'tunnel_cidr': 'auto', 'dmz_cidr': 'auto', 'maintenance_cidr': 'auto', 'ntp_server': '10.0.0.5' } }) # pylint: disable=C0103 def _create_vpc_mock(CidrBlock): return { 'Vpc': { 'CidrBlock': CidrBlock, 'VpcId': 'mock_vpc_id', 'DhcpOptionsId': 'mock_dhcp_options_id' } } client_mock = MagicMock() client_mock.create_vpc.side_effect = _create_vpc_mock client_mock.get_all_zones.return_value = [MagicMock()] client_mock.describe_dhcp_options.return_value = { 'DhcpOptions': [MagicMock()] } boto3_client_mock.return_value = client_mock auto_vpc = DiscoVPC('auto-vpc', 'auto-vpc-type') possible_vpcs = [ '10.0.0.0/26', '10.0.0.64/26', '10.0.0.128/26', '10.0.0.192/26' ] self.assertIn(str(auto_vpc.vpc['CidrBlock']), possible_vpcs)
def test_get_alarm_config_log_pattern_metric(self):
    """Test DiscoAlarmsConfig get_alarms for log pattern metrics"""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    alarms_config.config = get_mock_config({
        'reporting.LogMetrics.ErrorCount.mhcrasberi': {
            'log_pattern_metric': 'true',
            'threshold_max': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'average',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })
    alarms = alarms_config.get_alarms('mhcrasberi')
    self.assertEqual(1, len(alarms))
    # Log pattern metrics live in a per-environment namespace and are named
    # <hostclass>-<metric>
    self.assertEqual('LogMetrics/' + ENVIRONMENT, alarms[0].namespace)
    self.assertEqual('mhcrasberi-ErrorCount', alarms[0].metric_name)
def test_get_alarm_config(self):
    """Test DiscoAlarmsConfig get_alarms for regular metrics"""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    alarms_config.config = get_mock_config({
        'reporting.AWS/EC2.CPU.mhcrasberi': {
            'log_pattern_metric': 'false',
            'threshold_max': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'average',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })
    alarms = alarms_config.get_alarms('mhcrasberi')
    self.assertEqual(1, len(alarms))
    self.assertEqual('AWS/EC2', alarms[0].namespace)
    self.assertEqual('CPU', alarms[0].metric_name)
    # EC2 alarms attach to the hostclass's autoscaling group
    self.assertEqual(MOCK_GROUP_NAME, alarms[0].autoscaling_group_name)
def test_get_alarm_config_elb_metric(self):
    """Test DiscoAlarmsConfig get_alarms for ELB metrics"""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    alarms_config.config = get_mock_config({
        'reporting.AWS/ELB.HealthyHostCount.mhcbanana': {
            'threshold_min': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'Minimum',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })
    alarms = alarms_config.get_alarms('mhcbanana')
    self.assertEqual(1, len(alarms))
    # ELB alarms are dimensioned on the load balancer name
    expected_dimensions = {'LoadBalancerName': DiscoELB.get_elb_id('testenv', 'mhcbanana')}
    self.assertEqual(expected_dimensions, alarms[0].dimensions)
def setUp(self):
    """Create a DiscoLogMetrics with a mocked CloudWatch Logs client and config"""
    self.log_metrics = DiscoLogMetrics('test-env')
    self.log_metrics.logs = MagicMock()
    # One log-metric filter configured for hostclass mhcdummy
    config_mock = PropertyMock(return_value=get_mock_config({
        'mhcdummy.metric_name': {
            'log_file': '/error_log',
            'filter_pattern': 'error',
            'metric_value': 1
        }
    }))
    type(self.log_metrics).config = config_mock

    # pylint: disable=C0103
    def _describe_log_groups(logGroupNamePrefix):
        if logGroupNamePrefix == 'test-env/':
            # getting all log metrics in env
            return {
                'logGroups': [{
                    'logGroupName': 'test-env/mhcdummy/info_log'
                }, {
                    'logGroupName': 'test-env/mhcbanana/warning_log'
                }]
            }
        # getting all log metrics for hostclass
        return {
            'logGroups': [{
                'logGroupName': 'test-env/mhcdummy/info_log'
            }]
        }

    # pylint: disable=C0103
    def _describe_metric_filters(logGroupName):
        # Map each known log group to its single metric filter
        if logGroupName == 'test-env/mhcdummy/info_log':
            return {'metricFilters': [{'filterName': 'mhcdummy_metric'}]}
        elif logGroupName == 'test-env/mhcbanana/warning_log':
            return {'metricFilters': [{'filterName': 'mhcbanana_metric'}]}

    self.log_metrics.logs.describe_log_groups.side_effect = _describe_log_groups
    self.log_metrics.logs.describe_metric_filters.side_effect = _describe_metric_filters
def test_get_alarm_config_log_pattern_metric(self):
    """get_alarms should namespace log-pattern metrics per environment."""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    settings = {
        'log_pattern_metric': 'true',
        'threshold_max': '1',
        'duration': '60',
        'period': '5',
        'statistic': 'average',
        'custom_metric': 'false',
        'level': 'critical'
    }
    alarms_config.config = get_mock_config(
        {'reporting.LogMetrics.ErrorCount.mhcrasberi': settings})

    alarms = alarms_config.get_alarms('mhcrasberi')

    self.assertEqual(1, len(alarms))
    self.assertEqual('LogMetrics/' + ENVIRONMENT, alarms[0].namespace)
    self.assertEqual('mhcrasberi-ErrorCount', alarms[0].metric_name)
def test_get_alarm_config_es_metric(self):
    """get_alarms should attach domain/client dimensions for AWS/ES metrics."""
    alarms_config = DiscoAlarmsConfig(ENVIRONMENT,
                                      autoscale=self.autoscale,
                                      elasticsearch=self.elasticsearch)
    alarms_config.config = get_mock_config({
        'astro.AWS/ES.FreeStorageSpace.logs': {
            'threshold_min': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'Minimum',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })

    alarms = alarms_config.get_alarms('logs')

    self.assertEqual(1, len(alarms))
    self.assertEqual(
        {'DomainName': ELASTICSEARCH_DOMAIN_NAME, 'ClientId': ELASTICSEARCH_CLIENT_ID},
        alarms[0].dimensions)
def _get_config(*_args, **_kwargs):
    """Return a mock config describing two unittest ElastiCache clusters.

    Both clusters share the same settings; only 'new-cache' carries an
    explicit maintenance window.
    """
    base_settings = {
        'instance_type': 'cache.m1.small',
        'engine': 'redis',
        'engine_version': '2.8.6',
        'port': '1000',
        'parameter_group': 'default',
        'num_nodes': '5',
        'auto_failover': 'true'
    }
    new_cache_settings = dict(base_settings, maintenance_window='sun:10:00-sun:11:00')
    return get_mock_config({
        'unittest:new-cache': new_cache_settings,
        'unittest:old-cache': base_settings
    })
def test_get_alarm_config(self):
    """A regular (non-log-pattern) metric entry should yield exactly one EC2 alarm."""
    config = DiscoAlarmsConfig(ENVIRONMENT, autoscale=self.autoscale)
    config.config = get_mock_config({
        'reporting.AWS/EC2.CPU.mhcrasberi': {
            'log_pattern_metric': 'false',
            'threshold_max': '1',
            'duration': '60',
            'period': '5',
            'statistic': 'average',
            'custom_metric': 'false',
            'level': 'critical'
        }
    })

    result = config.get_alarms('mhcrasberi')

    self.assertEqual(len(result), 1)
    self.assertEqual(result[0].namespace, 'AWS/EC2')
    self.assertEqual(result[0].metric_name, 'CPU')
    self.assertEqual(result[0].autoscaling_group_name, MOCK_GROUP_NAME)
def test_provision_hc_with_chaos_using_config(self, mock_config, **kwargs):
    """
    Provision creates the proper launch configuration and autoscaling group
    with chaos enabled via the hostclass config rather than an argument.
    """
    config_dict = get_default_config_dict()
    config_dict["mhcunittest"]["chaos"] = "True"
    aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME,
                   log_metrics=MagicMock())
    mock_ami = self._get_image_mock(aws)
    aws.update_elb = MagicMock(return_value=None)
    aws.discogroup.elastigroup.spotinst_client = MagicMock()
    aws.vpc.environment_class = None

    with patch("disco_aws_automation.DiscoAWS.get_meta_network",
               return_value=_get_meta_network_mock()):
        with patch("boto.ec2.connection.EC2Connection.get_all_snapshots", return_value=[]):
            with patch("disco_aws_automation.DiscoAWS.create_scaling_schedule",
                       return_value=None):
                with patch("boto.ec2.autoscale.AutoScaleConnection.create_or_update_tags",
                           return_value=None):
                    with patch("disco_aws_automation.DiscoELB.get_or_create_target_group",
                               return_value="foobar"):
                        with patch("disco_aws_automation.DiscoAutoscale.update_tg",
                                   return_value=None):
                            metadata = aws.provision(ami=mock_ami, hostclass="mhcunittest",
                                                     owner="unittestuser",
                                                     min_size=1, desired_size=1, max_size=1)
                            self.assertEqual(metadata["hostclass"], "mhcunittest")
                            self.assertFalse(metadata["no_destroy"])
                            # chaos comes from the config, not the provision() call
                            self.assertTrue(metadata["chaos"])
                            _lc = aws.discogroup.get_configs()[0]
                            # BUGFIX: assertRegexpMatches is a deprecated alias that was
                            # removed in Python 3.12; assertRegex is the supported name.
                            self.assertRegex(_lc.name, r".*_mhcunittest_[0-9]*")
                            self.assertEqual(_lc.image_id, mock_ami.id)
                            self.assertTrue(
                                aws.discogroup.get_existing_group(hostclass="mhcunittest"))
                            _ag = aws.discogroup.get_existing_groups()[0]
                            self.assertRegex(_ag['name'], r"unittestenv_mhcunittest_[0-9]*")
                            self.assertEqual(_ag['min_size'], 1)
                            self.assertEqual(_ag['max_size'], 1)
                            self.assertEqual(_ag['desired_capacity'], 1)
def test_get_alarm_config_es_metric(self):
    """AWS/ES metric entries should produce an alarm with ES domain dimensions."""
    disco_alarms_config = DiscoAlarmsConfig(
        ENVIRONMENT, autoscale=self.autoscale, elasticsearch=self.elasticsearch)
    es_settings = {
        'threshold_min': '1',
        'duration': '60',
        'period': '5',
        'statistic': 'Minimum',
        'custom_metric': 'false',
        'level': 'critical'
    }
    disco_alarms_config.config = get_mock_config(
        {'astro.AWS/ES.FreeStorageSpace.logs': es_settings})

    result = disco_alarms_config.get_alarms('logs')

    self.assertEqual(len(result), 1)
    expected = {
        'DomainName': ELASTICSEARCH_DOMAIN_NAME,
        'ClientId': ELASTICSEARCH_CLIENT_ID
    }
    self.assertEqual(expected, result[0].dimensions)
def _get_config(*_args, **_kwargs):
    """Return a mock config for two unittest cache clusters.

    The clusters differ only in that 'new-cache' specifies a maintenance
    window while 'old-cache' does not.
    """
    def _cache_settings(**extra):
        # Shared baseline for both clusters; keyword overrides are appended.
        settings = {
            'instance_type': 'cache.m1.small',
            'engine': 'redis',
            'engine_version': '2.8.6',
            'port': '1000',
            'parameter_group': 'default',
            'num_nodes': '5',
            'auto_failover': 'true'
        }
        settings.update(extra)
        return settings

    return get_mock_config({
        'unittest:new-cache': _cache_settings(maintenance_window='sun:10:00-sun:11:00'),
        'unittest:old-cache': _cache_settings()
    })
def test_create_vpc_ntp_names(self, meta_network_mock, boto3_resource_mock, boto3_client_mock,
                              config_mock, sleep_mock, gateways_mock, sns_mock,
                              endpoints_mock, rds_mock, gethostbyname_mock):
    """Test creating VPC with NTP server names"""
    # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored
    state = {
        'dhcp_options_created': False,
        'ntp_servers_dict': {
            '0.mock.ntp.server': '100.10.10.10',
            '1.mock.ntp.server': '100.10.10.11',
            '2.mock.ntp.server': '100.10.10.12'
        },
        'new_mock_dhcp_options_id': 'new_mock_dhcp_options_id',
        'mock_vpc_id': 'mock_vpc_id'
    }
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': ' '.join(state['ntp_servers_dict'].keys())
        }
    })

    # pylint: disable=C0103
    def _create_vpc_mock(CidrBlock):
        return {'Vpc': {'CidrBlock': CidrBlock,
                        'VpcId': state['mock_vpc_id'],
                        'DhcpOptionsId': 'mock_dhcp_options_id'}}

    def _dhcp_create_mock(**_kwargs):
        # Record that DHCP options now exist so the describe mock can see them.
        state['dhcp_options_created'] = True
        return {'DhcpOptions': {'DhcpOptionsId': state['new_mock_dhcp_options_id']}}

    def _dhcp_describe_mock(**_kwargs):
        if state['dhcp_options_created']:
            return {'DhcpOptions': [{'DhcpOptionsId': state['new_mock_dhcp_options_id']}]}
        return {'DhcpOptions': []}

    def _gethostbyname_mock(hostname):
        return state['ntp_servers_dict'][hostname]

    client_mock = MagicMock()
    client_mock.create_vpc.side_effect = _create_vpc_mock
    client_mock.get_all_zones.return_value = [MagicMock()]
    client_mock.create_dhcp_options.side_effect = _dhcp_create_mock
    client_mock.describe_dhcp_options.side_effect = _dhcp_describe_mock
    gethostbyname_mock.side_effect = _gethostbyname_mock
    boto3_client_mock.return_value = client_mock

    # Calling method under test
    DiscoVPC('auto-vpc', 'auto-vpc-type')

    # Verifying result: the NTP names must have been resolved to addresses
    dhcp_configs = client_mock.create_dhcp_options.call_args[1]['DhcpConfigurations']
    actual_ntp_servers = [option['Values'] for option in dhcp_configs
                          if option['Key'] == 'ntp-servers'][0]
    self.assertEqual(set(actual_ntp_servers), set(state['ntp_servers_dict'].values()))
    client_mock.associate_dhcp_options.assert_has_calls(
        [call(DhcpOptionsId=state['new_mock_dhcp_options_id'],
              VpcId=state['mock_vpc_id'])])
def test_update_meta_network_sg_rules(self, networks_mock, config_mock):
    """ Verify creating all new security group rules """
    # Environment config: one sg_rules entry per meta network, plus
    # customer_ports/customer_cidr which feed extra DMZ ingress rules.
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'intranet_sg_rules': 'tcp all 0:65535, udp intranet 0:65535, tcp dmz 2181',
            'tunnel_sg_rules': 'tcp all 25 80 443, tcp maintenance 22, udp all 123',
            'dmz_sg_rules': 'tcp maintenance 22 3212, tcp 66.104.227.162/32 80 443, '
                            'tcp 38.117.159.162/32 80 443, tcp 64.106.168.244/32 80 443',
            'maintenance_sg_rules': 'tcp maintenance 22, tcp 66.104.227.162/32 0:65535, '
                                    'tcp 38.117.159.162/32 0:65535',
            'customer_ports': '80 443',
            'customer_cidr': '0.0.0.0/0'
        }
    })
    # One mock meta network (each with its own security group) per tier.
    mock_intranet = _create_network_mock('intranet', 'intranet_sg')
    mock_dmz = _create_network_mock('dmz', 'dmz_sg')
    mock_tunnel = _create_network_mock('tunnel', 'tunnel_sg')
    mock_maintenance = _create_network_mock('maintenance', 'maintenance_sg')
    networks_mock.return_value = {
        mock_intranet.name: mock_intranet,
        mock_dmz.name: mock_dmz,
        mock_tunnel.name: mock_tunnel,
        mock_maintenance.name: mock_maintenance
    }

    # Method under test
    self.disco_vpc_sg_rules.update_meta_network_sg_rules()

    # Expected rule tuples below appear to follow the shape
    # (dest_sg_id, protocol, from_port, to_port, source_sg_id, source_cidr)
    # -- TODO confirm against the rule-tuple producer in disco_vpc_sg_rules.
    # 'all' in the config expands to one rule per meta network.
    expected_intranet_sg_rules = [
        (mock_intranet.security_group.id, 'tcp', 0, 65535, mock_tunnel.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 0, 65535, mock_intranet.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 0, 65535, mock_dmz.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 0, 65535, mock_maintenance.security_group.id, None),
        (mock_intranet.security_group.id, 'udp', 0, 65535, mock_intranet.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 2181, 2181, mock_dmz.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 80, 80, mock_dmz.security_group.id, None),
        (mock_intranet.security_group.id, 'tcp', 443, 443, mock_dmz.security_group.id, None)
    ]
    mock_intranet.update_sg_rules.assert_called_once_with(
        expected_intranet_sg_rules, False)

    expected_tunnel_sg_rules = [
        (mock_tunnel.security_group.id, 'tcp', 25, 25, mock_tunnel.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 25, 25, mock_intranet.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 25, 25, mock_dmz.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 25, 25, mock_maintenance.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 80, 80, mock_tunnel.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 80, 80, mock_intranet.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 80, 80, mock_dmz.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 80, 80, mock_maintenance.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 443, 443, mock_tunnel.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 443, 443, mock_intranet.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 443, 443, mock_dmz.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 443, 443, mock_maintenance.security_group.id, None),
        (mock_tunnel.security_group.id, 'tcp', 22, 22, mock_maintenance.security_group.id, None),
        (mock_tunnel.security_group.id, 'udp', 123, 123, mock_tunnel.security_group.id, None),
        (mock_tunnel.security_group.id, 'udp', 123, 123, mock_intranet.security_group.id, None),
        (mock_tunnel.security_group.id, 'udp', 123, 123, mock_dmz.security_group.id, None),
        (mock_tunnel.security_group.id, 'udp', 123, 123, mock_maintenance.security_group.id, None)
    ]
    mock_tunnel.update_sg_rules.assert_called_once_with(
        expected_tunnel_sg_rules, False)

    # DMZ rules mix sg-sourced rules, literal CIDR rules from dmz_sg_rules,
    # and the customer_ports/customer_cidr rules (0.0.0.0/0 on 80 and 443).
    expected_dmz_sg_rules = [
        (mock_dmz.security_group.id, 'tcp', 22, 22, mock_maintenance.security_group.id, None),
        (mock_dmz.security_group.id, 'tcp', 3212, 3212, mock_maintenance.security_group.id, None),
        (mock_dmz.security_group.id, 'tcp', 80, 80, None, '66.104.227.162/32'),
        (mock_dmz.security_group.id, 'tcp', 443, 443, None, '66.104.227.162/32'),
        (mock_dmz.security_group.id, 'tcp', 80, 80, None, '38.117.159.162/32'),
        (mock_dmz.security_group.id, 'tcp', 443, 443, None, '38.117.159.162/32'),
        (mock_dmz.security_group.id, 'tcp', 80, 80, None, '64.106.168.244/32'),
        (mock_dmz.security_group.id, 'tcp', 443, 443, None, '64.106.168.244/32'),
        (mock_dmz.security_group.id, 'tcp', 80, 80, None, '0.0.0.0/0'),
        (mock_dmz.security_group.id, 'tcp', 80, 80, mock_dmz.security_group.id, None),
        (mock_dmz.security_group.id, 'tcp', 443, 443, None, '0.0.0.0/0'),
        (mock_dmz.security_group.id, 'tcp', 443, 443, mock_dmz.security_group.id, None)
    ]
    mock_dmz.update_sg_rules.assert_called_once_with(
        expected_dmz_sg_rules, False)

    expected_maintenance_sg_rules = [
        (mock_maintenance.security_group.id, 'tcp', 22, 22, mock_maintenance.security_group.id, None),
        (mock_maintenance.security_group.id, 'tcp', 0, 65535, None, '66.104.227.162/32'),
        (mock_maintenance.security_group.id, 'tcp', 0, 65535, None, '38.117.159.162/32')
    ]
    mock_maintenance.update_sg_rules.assert_called_once_with(
        expected_maintenance_sg_rules, False)
def test_read_env_from_config(self):
    """DiscoSSM should fall back to the configured environment when none is given."""
    self._ssm = DiscoSSM(config_aws=get_mock_config(MOCK_AWS_CONFIG_DEFINITION))
    self.assertEqual(TEST_ENV_NAME, self._ssm.environment_name)
def setUp(self):
    """Create a DiscoSSM wired to an explicit environment and a mock AWS config."""
    mock_aws_config = get_mock_config(MOCK_AWS_CONFIG_DEFINITION)
    self._ssm = DiscoSSM(environment_name=TEST_ENV_NAME,
                         config_aws=mock_aws_config)
def test_update_nat_gateways_and_routes(self, config_mock, meta_network_mock):
    """ Verify NAT gateways and the routes to them are created properly """
    config_mock.return_value = get_mock_config({
        'envtype:sandbox': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5',
            'tunnel_nat_gateways': '10.1.0.4,10.1.0.5,10.1.0.6',
            'intranet_nat_gateways': 'auto',
            'nat_gateway_routes': 'intranet/tunnel'
        }
    })

    # One reusable mock per meta network tier, handed out by name.
    meta_networks = {
        'intranet': MagicMock(),
        'dmz': MagicMock(),
        'maintenance': MagicMock(),
        'tunnel': MagicMock()
    }

    def _meta_network_mock(name, vpc, network_cidr=None, boto3_connection=None):
        ret = meta_networks.get(name)
        if ret is None:
            return None
        ret.name = name
        ret.vpc = vpc
        ret.subnet_ids = ["subnet-cafe", "subnet-beef"]
        ret.get_nat_gateway_metanetwork.return_value = None
        ret.network_cidr = IPNetwork(network_cidr) if network_cidr else IPNetwork('10.0.0.0/26')
        return ret

    meta_network_mock.side_effect = _meta_network_mock
    # End of setting up test

    # Calling method under test
    self.disco_vpc_gateways.update_nat_gateways_and_routes()

    # Verifying correct behavior: intranet routes through tunnel's NAT
    # gateways, and one EIP allocation is used per configured gateway.
    meta_networks['intranet'].upsert_nat_gateway_route.assert_called_once_with(
        meta_networks['tunnel'])
    expected_allocation_ids = [
        self.disco_vpc_gateways.eip.find_eip_address('eip').allocation_id
        for _ in range(3)
    ]
    meta_networks['tunnel'].add_nat_gateways.assert_called_once_with(
        allocation_ids=expected_allocation_ids)
def test_update_gateways_and_routes(self, time_mock, config_mock, meta_network_mock):
    """ Verify Internet and VPN gateways and the routes to them are created properly """
    config_mock.return_value = get_mock_config({
        'envtype:sandbox': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5',
            'tunnel_igw_routes': '0.0.0.0/0',
            'dmz_igw_routes': '66.104.227.162/32 38.117.159.162/32 64.106.168.244/32',
            'maintenance_vgw_routes': '10.1.0.22/32 10.1.0.24/32'
        }
    })

    # One mock per meta network tier, dispatched by name.
    nets = {name: MagicMock() for name in ('intranet', 'dmz', 'maintenance', 'tunnel')}

    def _meta_network_mock(name, vpc, network_cidr=None, boto3_connection=None):
        ret = nets.get(name)
        if ret is None:
            return None
        ret.name = name
        ret.vpc = vpc
        ret.network_cidr = IPNetwork(network_cidr) if network_cidr else IPNetwork('10.0.0.0/26')
        return ret

    meta_network_mock.side_effect = _meta_network_mock

    self.mock_vpc.boto3_ec2.describe_internet_gateways.return_value = {
        'InternetGateways': [{'InternetGatewayId': MOCK_IGW_ID}]
    }
    self.mock_vpc.boto3_ec2.describe_vpn_gateways.return_value = {
        'VpnGateways': [{'VpnGatewayId': MOCK_VGW_ID,
                         'VpcAttachments': [{'State': 'attached', 'VpcId': MOCK_VPC_ID}]}]
    }
    # End of setting up test

    # Calling method under test
    self.disco_vpc_gateways.update_gateways_and_routes()

    # Verifying correct behavior
    nets['intranet'].update_gateways_and_routes.assert_called_once_with([], False)
    nets['dmz'].update_gateways_and_routes.assert_called_once_with(
        [('66.104.227.162/32', MOCK_IGW_ID),
         ('38.117.159.162/32', MOCK_IGW_ID),
         ('64.106.168.244/32', MOCK_IGW_ID)], False)
    nets['maintenance'].update_gateways_and_routes.assert_called_once_with(
        [('10.1.0.22/32', MOCK_VGW_ID), ('10.1.0.24/32', MOCK_VGW_ID)], False)
    nets['tunnel'].update_gateways_and_routes.assert_called_once_with(
        [('0.0.0.0/0', MOCK_IGW_ID)], False)
    self.mock_vpc.boto3_ec2.attach_vpn_gateway.assert_called_once_with(
        VpcId=MOCK_VPC_ID, VpnGatewayId=MOCK_VGW_ID)
def setUp(self):
    """Set up a DiscoElasticsearch instance against a fully mocked ES connection."""
    self.mock_route_53 = _get_mock_route53()
    config_aws = get_mock_config(MOCK_AWS_CONFIG_DEFINITION)
    config_vpc = get_mock_config(MOCK_VPC_CONFIG_DEFINITION)
    config_es = get_mock_config(MOCK_ES_CONFIG_DEFINITION)
    # Random 12-digit AWS account id so tests never rely on a fixed value.
    self.account_id = ''.join(random.choice("0123456789") for _ in range(12))
    self.region = "us-west-2"
    self.environment_name = "foo"
    self.mock_alarms = MagicMock()
    self._es = DiscoElasticsearch(environment_name=self.environment_name, alarms=self.mock_alarms,
                                  config_aws=config_aws, config_es=config_es,
                                  config_vpc=config_vpc, route53=self.mock_route_53)
    self._es._account_id = self.account_id
    self._es._region = self.region
    self._es._conn = MagicMock()
    # In-memory "backend": domain name -> domain config dict. The side-effect
    # functions below read/write it so the mocked ES client behaves statefully.
    self.domain_configs = {}

    def _list_domain_names():
        domain_names = [{"DomainName": domain_name} for domain_name in self.domain_configs]
        return {"DomainNames": domain_names}

    # pylint doesn't like Boto3's argument names
    # pylint: disable=C0103
    def _delete_elasticsearch_domain(DomainName):
        # pop with default: deleting a nonexistent domain is a no-op here
        self.domain_configs.pop(DomainName, None)

    # pylint doesn't like Boto3's argument names
    # pylint: disable=C0103
    def _describe_elasticsearch_domain(DomainName):
        return self.domain_configs[DomainName]

    def _create_elasticsearch_domain(**config):
        domain_name = config["DomainName"]
        if domain_name in self.domain_configs:
            # Re-creating an existing domain keeps its endpoint and domain id.
            endpoint = self.domain_configs[domain_name]["DomainStatus"]["Endpoint"]
            domain_id = self.domain_configs[domain_name]["DomainStatus"]["DomainId"]
        else:
            # Fabricate a plausible-looking endpoint and "client_id/domain" id.
            cluster_id = ''.join(random.choice("0123456789abcdef") for _ in range(60))
            endpoint = "search-{}-{}.{}.es.amazonaws.com".format(domain_name, cluster_id,
                                                                 self.region)
            client_id = ''.join(random.choice("0123456789") for _ in range(12))
            domain_id = "{}/{}".format(client_id, domain_name)
        config["Endpoint"] = endpoint
        config["DomainId"] = domain_id
        domain_config = {
            "DomainStatus": config
        }
        self.domain_configs[domain_name] = domain_config

    def _update_elasticsearch_domain_config(**config):
        # Updating requires the domain to exist; otherwise signal failure.
        if config["DomainName"] not in self.domain_configs:
            raise RuntimeError("Domain not found: {}".format(config["DomainName"]))
        _create_elasticsearch_domain(**config)

    self._es._conn.list_domain_names.side_effect = _list_domain_names
    self._es._conn.delete_elasticsearch_domain.side_effect = _delete_elasticsearch_domain
    self._es._conn.describe_elasticsearch_domain.side_effect = _describe_elasticsearch_domain
    self._es._conn.create_elasticsearch_domain.side_effect = _create_elasticsearch_domain
    self._es._conn.update_elasticsearch_domain_config.side_effect = _update_elasticsearch_domain_config
def test_update_gateways_and_routes(self, time_mock, config_mock, meta_network_mock):
    """ Verify Internet and VPN gateways and the routes to them are created properly """
    vpc_settings = {
        'ip_space': '10.0.0.0/24',
        'vpc_cidr_size': '26',
        'intranet_cidr': 'auto',
        'tunnel_cidr': 'auto',
        'dmz_cidr': 'auto',
        'maintenance_cidr': 'auto',
        'ntp_server': '10.0.0.5',
        'tunnel_igw_routes': '0.0.0.0/0',
        'dmz_igw_routes': '66.104.227.162/32 38.117.159.162/32 64.106.168.244/32',
        'maintenance_vgw_routes': '10.1.0.22/32 10.1.0.24/32'
    }
    config_mock.return_value = get_mock_config({'envtype:sandbox': vpc_settings})

    intranet_net = MagicMock()
    dmz_net = MagicMock()
    maintenance_net = MagicMock()
    tunnel_net = MagicMock()
    mock_by_name = {
        'intranet': intranet_net,
        'dmz': dmz_net,
        'maintenance': maintenance_net,
        'tunnel': tunnel_net
    }

    def _meta_network_mock(name, vpc, network_cidr=None, boto3_connection=None):
        net = mock_by_name.get(name)
        if net is None:
            return None
        net.name = name
        net.vpc = vpc
        if network_cidr:
            net.network_cidr = IPNetwork(network_cidr)
        else:
            net.network_cidr = IPNetwork('10.0.0.0/26')
        return net

    meta_network_mock.side_effect = _meta_network_mock

    self.mock_vpc.boto3_ec2.describe_internet_gateways.return_value = {
        'InternetGateways': [{'InternetGatewayId': MOCK_IGW_ID}]
    }
    self.mock_vpc.boto3_ec2.describe_vpn_gateways.return_value = {
        'VpnGateways': [{
            'VpnGatewayId': MOCK_VGW_ID,
            'VpcAttachments': [{'State': 'attached', 'VpcId': MOCK_VPC_ID}]
        }]
    }

    # Exercise the method under test.
    self.disco_vpc_gateways.update_gateways_and_routes()

    # Intranet gets no gateway routes; dmz/tunnel route via the IGW,
    # maintenance via the VGW, and the VGW must be attached to the VPC.
    intranet_net.update_gateways_and_routes.assert_called_once_with([], False)
    dmz_net.update_gateways_and_routes.assert_called_once_with(
        [('66.104.227.162/32', MOCK_IGW_ID),
         ('38.117.159.162/32', MOCK_IGW_ID),
         ('64.106.168.244/32', MOCK_IGW_ID)], False)
    maintenance_net.update_gateways_and_routes.assert_called_once_with(
        [('10.1.0.22/32', MOCK_VGW_ID), ('10.1.0.24/32', MOCK_VGW_ID)], False)
    tunnel_net.update_gateways_and_routes.assert_called_once_with(
        [('0.0.0.0/0', MOCK_IGW_ID)], False)
    self.mock_vpc.boto3_ec2.attach_vpn_gateway.assert_called_once_with(
        VpcId=MOCK_VPC_ID, VpnGatewayId=MOCK_VGW_ID)
def test_create_vpc_ntp_names(self, meta_network_mock, boto3_resource_mock, boto3_client_mock,
                              config_mock, sleep_mock, gateways_mock, sns_mock,
                              endpoints_mock, rds_mock, gethostbyname_mock):
    """Test creating VPC with NTP server names"""
    # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored
    ntp_servers = {
        '0.mock.ntp.server': '100.10.10.10',
        '1.mock.ntp.server': '100.10.10.11',
        '2.mock.ntp.server': '100.10.10.12'
    }
    new_dhcp_options_id = 'new_mock_dhcp_options_id'
    vpc_id = 'mock_vpc_id'
    # Mutable flag shared with the closures below (no nonlocal, py2-friendly).
    flags = {'dhcp_options_created': False}

    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': ' '.join(ntp_servers.keys())
        }
    })

    # pylint: disable=C0103
    def _create_vpc_mock(CidrBlock):
        return {'Vpc': {'CidrBlock': CidrBlock,
                        'VpcId': vpc_id,
                        'DhcpOptionsId': 'mock_dhcp_options_id'}}

    def _create_dhcp_options(**_kwargs):
        flags['dhcp_options_created'] = True
        return {'DhcpOptions': {'DhcpOptionsId': new_dhcp_options_id}}

    def _describe_dhcp_options(**_kwargs):
        if flags['dhcp_options_created']:
            return {'DhcpOptions': [{'DhcpOptionsId': new_dhcp_options_id}]}
        return {'DhcpOptions': []}

    def _gethostbyname(hostname):
        return ntp_servers[hostname]

    client_mock = MagicMock()
    client_mock.create_vpc.side_effect = _create_vpc_mock
    client_mock.get_all_zones.return_value = [MagicMock()]
    client_mock.create_dhcp_options.side_effect = _create_dhcp_options
    client_mock.describe_dhcp_options.side_effect = _describe_dhcp_options
    gethostbyname_mock.side_effect = _gethostbyname
    boto3_client_mock.return_value = client_mock

    # Calling method under test
    DiscoVPC('auto-vpc', 'auto-vpc-type')

    # Verifying result: each NTP name must have been resolved to its address
    ntp_options = [option['Values']
                   for option in client_mock.create_dhcp_options.call_args[1]['DhcpConfigurations']
                   if option['Key'] == 'ntp-servers']
    self.assertEqual(set(ntp_options[0]), set(ntp_servers.values()))
    client_mock.associate_dhcp_options.assert_has_calls(
        [call(DhcpOptionsId=new_dhcp_options_id, VpcId=vpc_id)])
def test_create_vpc_with_custom_tags(self, boto3_resource_mock, boto3_client_mock,
                                     config_mock, endpoints_mock, sns_mock, rds_mock):
    """Test creating a VPC with a dynamic ip range and tags"""
    # FIXME This needs to mock way too many things. DiscoVPC needs to be refactored
    config_mock.return_value = get_mock_config({
        'envtype:auto-vpc-type': {
            'ip_space': '10.0.0.0/24',
            'vpc_cidr_size': '26',
            'intranet_cidr': 'auto',
            'tunnel_cidr': 'auto',
            'dmz_cidr': 'auto',
            'maintenance_cidr': 'auto',
            'ntp_server': '10.0.0.5',
            'application': 'test'
        }
    })

    # pylint: disable=C0103
    def _create_vpc_mock(CidrBlock):
        return {'Vpc': {'CidrBlock': CidrBlock,
                        'VpcId': 'mock_vpc_id',
                        'DhcpOptionsId': 'mock_dhcp_options_id'}}

    client_mock = MagicMock()
    client_mock.create_vpc.side_effect = _create_vpc_mock
    boto3_client_mock.return_value = client_mock

    resource_mock = MagicMock()
    # BUGFIX: configure create_tags on the Vpc *instance* (Vpc(...)), which is
    # the object whose call_args the assertions below inspect; the old code
    # configured it on the Vpc factory attribute instead.
    resource_mock.Vpc.return_value.create_tags.return_value = []
    boto3_resource_mock.return_value = resource_mock
    client_mock.describe_vpn_gateways.return_value = {'VpnGateways': []}

    my_tags_options = {'productline': 'astronauts', 'mytag': 'tag_value'}
    DiscoVPC._get_vpc_cidr = MagicMock()
    DiscoVPC._get_vpc_cidr.return_value = '10.0.0.0/26'

    with patch("disco_aws_automation.DiscoVPC._create_new_meta_networks",
               return_value=MagicMock(return_value={})):
        with patch("disco_aws_automation.DiscoVPC._update_dhcp_options", return_value=None):
            # The expected list of tag dictionaries
            expected_vpc_tags = [
                {'Value': 'auto-vpc', 'Key': 'Name'},
                {'Value': 'auto-vpc-type', 'Key': 'type'},
                {'Value': 'ANY', 'Key': 'create_date'},
                {'Value': 'astronauts', 'Key': 'productline'},
                {'Value': 'tag_value', 'Key': 'mytag'},
                {'Value': 'test', 'Key': 'application'}
            ]

            DiscoVPC('auto-vpc', 'auto-vpc-type', vpc_tags=my_tags_options)

            # Get the create_tags keyword arguments
            call_args_tags = resource_mock.Vpc.return_value.create_tags.call_args[1]

            # Verify Option Name.
            # BUGFIX: dict.keys() is a view object in Python 3 and never compares
            # equal to a list, so wrap it in list() for the comparison.
            self.assertEqual(['Tags'], list(call_args_tags.keys()))

            call_tags_dict = call_args_tags['Tags']

            # Verify the number of tag dictionaries in the list
            self.assertEqual(6, len(call_tags_dict))

            # Verify each tag option (create_date is timestamped, so treat it as ANY)
            for tag_option in call_tags_dict:
                if tag_option['Key'] == 'create_date':
                    tag_option['Value'] = 'ANY'
                self.assertIn(tag_option, expected_vpc_tags)