Ejemplo n.º 1
0
    def test_provision_hc_with_chaos_using_config(self, mock_config, **kwargs):
        """
        Provision creates the proper launch configuration and autoscaling group with chaos from config
        """
        config_dict = get_default_config_dict()
        # enable chaos through hostclass config rather than a provision() argument
        config_dict["mhcunittest"]["chaos"] = "True"
        aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME)
        mock_ami = self._get_image_mock(aws)
        aws.log_metrics = MagicMock()
        aws.update_elb = MagicMock(return_value=None)

        # stub out the network/EC2/autoscaling calls provision() makes internally
        with patch("disco_aws_automation.DiscoAWS.get_meta_network", return_value=_get_meta_network_mock()):
            with patch("boto.ec2.connection.EC2Connection.get_all_snapshots", return_value=[]):
                with patch("disco_aws_automation.DiscoAWS.create_scaling_schedule", return_value=None):
                    with patch("boto.ec2.autoscale.AutoScaleConnection.create_or_update_tags",
                               return_value=None):
                        metadata = aws.provision(ami=mock_ami, hostclass="mhcunittest",
                                                 owner="unittestuser",
                                                 min_size=1, desired_size=1, max_size=1)

        # the config-driven chaos flag should surface in provision()'s metadata
        self.assertEqual(metadata["hostclass"], "mhcunittest")
        self.assertFalse(metadata["no_destroy"])
        self.assertTrue(metadata["chaos"])
        # launch configuration: name pattern and AMI should match what we provisioned
        _lc = aws.autoscale.get_configs()[0]
        self.assertRegexpMatches(_lc.name, r".*_mhcunittest_[0-9]*")
        self.assertEqual(_lc.image_id, mock_ami.id)
        self.assertTrue(aws.autoscale.has_group("mhcunittest"))
        # autoscaling group: env-qualified name and the requested sizing
        _ag = aws.autoscale.get_groups()[0]
        self.assertEqual(_ag.name, "unittestenv_mhcunittest")
        self.assertEqual(_ag.min_size, 1)
        self.assertEqual(_ag.max_size, 1)
        self.assertEqual(_ag.desired_capacity, 1)
Ejemplo n.º 2
0
    def setUp(self):
        """Create a DiscoLogMetrics for 'test-env' with a stubbed logs client and mock config."""
        self.log_metrics = DiscoLogMetrics('test-env')
        self.log_metrics.logs = MagicMock()

        # patch the config property on the instance's type so attribute reads hit the mock
        type(self.log_metrics).config = PropertyMock(return_value=get_mock_config({
            'mhcdummy.metric_name': {
                'log_file': '/error_log',
                'filter_pattern': 'error',
                'metric_value': 1
            }
        }))

        # pylint: disable=C0103
        def _fake_describe_log_groups(logGroupNamePrefix):
            # the bare environment prefix means "every log group in the env"
            if logGroupNamePrefix == 'test-env/':
                return {'logGroups': [{'logGroupName': 'test-env/mhcdummy/info_log'},
                                      {'logGroupName': 'test-env/mhcbanana/warning_log'}]}
            # any other prefix is a hostclass-specific lookup
            return {'logGroups': [{'logGroupName': 'test-env/mhcdummy/info_log'}]}

        # pylint: disable=C0103
        def _fake_describe_metric_filters(logGroupName):
            filters_by_group = {
                'test-env/mhcdummy/info_log': [{'filterName': 'mhcdummy_metric'}],
                'test-env/mhcbanana/warning_log': [{'filterName': 'mhcbanana_metric'}],
            }
            # unknown groups fall through and return None, as the original did
            if logGroupName in filters_by_group:
                return {'metricFilters': filters_by_group[logGroupName]}

        self.log_metrics.logs.describe_log_groups.side_effect = _fake_describe_log_groups
        self.log_metrics.logs.describe_metric_filters.side_effect = _fake_describe_metric_filters
Ejemplo n.º 3
0
 def test_eligible_instances_retainage_zero(self):
     """Test that retainage of zero retains nothing"""
     config_dict = get_default_config_dict()
     # retainage=0.0 means no instances are held back from termination
     self.chaos = DiscoChaos(config=get_mock_config(config_dict),
                             environment_name=TEST_ENV_NAME,
                             level=25.0, retainage=0.0)
     self.chaos._groups = [self._mock_group()]
     # with zero retainage all 3 mocked instances are eligible
     self.assertEqual(len(self.chaos._termination_eligible_instances()), 3)
Ejemplo n.º 4
0
    def test_create_userdata_with_eip(self, **kwargs):
        """
        create_userdata sets 'eip' key when an EIP is required
        """
        elastic_ip = "54.201.250.76"
        config_dict = get_default_config_dict()
        # an EIP configured on the hostclass must flow through into the user data
        config_dict["mhcunittest"]["eip"] = elastic_ip
        disco_aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME)

        user_data = disco_aws.create_userdata(hostclass="mhcunittest", owner="unittestuser", testing=False)
        self.assertEqual(user_data["eip"], elastic_ip)
Ejemplo n.º 5
0
    def test_create_userdata_with_eip(self, **kwargs):
        """
        create_userdata sets 'eip' key when an EIP is required
        """
        config_dict = get_default_config_dict()
        eip = "54.201.250.76"
        # an EIP configured on the hostclass must flow through into the user data
        config_dict["mhcunittest"]["eip"] = eip
        aws = DiscoAWS(config=get_mock_config(config_dict),
                       environment_name=TEST_ENV_NAME)

        user_data = aws.create_userdata(hostclass="mhcunittest",
                                        owner="unittestuser",
                                        testing=False)
        self.assertEqual(user_data["eip"], eip)
Ejemplo n.º 6
0
 def _get_elb_config(self):
     """Return a mock config whose 'mhcelb' hostclass has an ELB enabled."""
     elb_hostclass = dict(
         subnet="intranet",
         security_group="intranet",
         ssh_key_name="unittestkey",
         instance_profile_name="unittestprofile",
         public_ip="False",
         ip_address=None,
         eip=None,
         route=None,
         source_dest_check="yes",
         domain_name="example.com",
         elb="yes",
         elb_health_check_url="/foo",
     )
     config = get_default_config_dict()
     config["mhcelb"] = elb_hostclass
     return get_mock_config(config)
Ejemplo n.º 7
0
 def _get_elb_config(self):
     """Return a mock config whose 'mhcelb' hostclass has an ELB enabled."""
     config = get_default_config_dict()
     # ELB is turned on ('elb': 'yes') with a custom health-check URL
     config["mhcelb"] = {
         "subnet": "intranet",
         "security_group": "intranet",
         "ssh_key_name": "unittestkey",
         "instance_profile_name": "unittestprofile",
         "public_ip": "False",
         "ip_address": None,
         "eip": None,
         "route": None,
         "source_dest_check": "yes",
         "domain_name": "example.com",
         "elb": "yes",
         "elb_health_check_url": "/foo"
     }
     return get_mock_config(config)
Ejemplo n.º 8
0
    def test_get_alarm_config_elb_metric(self):
        """Test DiscoAlarmsConfig get_alarms for ELB metrics"""
        disco_alarms_config = DiscoAlarmsConfig(ENVIRONMENT)
        disco_alarms_config.config = get_mock_config({
            'reporting.AWS/ELB.HealthyHostCount.mhcbanana': {
                'threshold_min': '1',
                'duration': '60',
                'period': '5',
                'statistic': 'Minimum',
                'custom_metric': 'false',
                'level': 'critical'
            }
        })

        alarm_configs = disco_alarms_config.get_alarms('mhcbanana')
        self.assertEqual(1, len(alarm_configs))
        # ELB alarms are dimensioned on the env-qualified load balancer name
        # (assertEquals is a deprecated alias; assertEqual is the supported name)
        self.assertEqual({'LoadBalancerName': 'testenv-mhcbanana'},
                         alarm_configs[0].dimensions)
Ejemplo n.º 9
0
    def test_get_alarm_config(self):
        """Test DiscoAlarmsConfig get_alarms for regular metrics"""
        disco_alarms_config = DiscoAlarmsConfig(ENVIRONMENT)
        disco_alarms_config.config = get_mock_config({
            'reporting.EC2.CPU.mhcrasberi': {
                'log_pattern_metric': 'false',
                'threshold_max': '1',
                'duration': '60',
                'period': '5',
                'statistic': 'average',
                'custom_metric': 'false',
                'level': 'critical'
            }
        })

        alarm_configs = disco_alarms_config.get_alarms('mhcrasberi')
        self.assertEqual(1, len(alarm_configs))
        # namespace and metric name are parsed out of the option key
        # (assertEquals is a deprecated alias; assertEqual is the supported name)
        self.assertEqual('EC2', alarm_configs[0].namespace)
        self.assertEqual('CPU', alarm_configs[0].metric_name)
Ejemplo n.º 10
0
    def setUp(self):
        """Create a DiscoLogMetrics for 'test-env' with a stubbed logs client and mock config."""
        self.log_metrics = DiscoLogMetrics('test-env')
        self.log_metrics.logs = MagicMock()

        # patch the config property on the instance's type so attribute reads hit the mock
        config_mock = PropertyMock(return_value=get_mock_config({
            'mhcdummy.metric_name': {
                'log_file': '/error_log',
                'filter_pattern': 'error',
                'metric_value': 1
            }
        }))
        type(self.log_metrics).config = config_mock

        # pylint: disable=C0103
        def _describe_log_groups(logGroupNamePrefix):
            if logGroupNamePrefix == 'test-env/':  # getting all log metrics in env
                return {
                    'logGroups': [{
                        'logGroupName': 'test-env/mhcdummy/info_log'
                    }, {
                        'logGroupName':
                        'test-env/mhcbanana/warning_log'
                    }]
                }

            else:  # getting all log metrics for hostclass
                return {
                    'logGroups': [{
                        'logGroupName': 'test-env/mhcdummy/info_log'
                    }]
                }

        # pylint: disable=C0103
        def _describe_metric_filters(logGroupName):
            # unknown log groups fall through and return None
            if logGroupName == 'test-env/mhcdummy/info_log':
                return {'metricFilters': [{'filterName': 'mhcdummy_metric'}]}
            elif logGroupName == 'test-env/mhcbanana/warning_log':
                return {'metricFilters': [{'filterName': 'mhcbanana_metric'}]}

        self.log_metrics.logs.describe_log_groups.side_effect = _describe_log_groups
        self.log_metrics.logs.describe_metric_filters.side_effect = _describe_metric_filters
Ejemplo n.º 11
0
    def test_provision_hc_with_chaos_using_config(self, mock_config, **kwargs):
        """
        Provision creates the proper launch configuration and autoscaling group with chaos from config
        """
        config_dict = get_default_config_dict()
        config_dict["mhcunittest"]["chaos"] = "True"
        aws = DiscoAWS(config=get_mock_config(config_dict), environment_name=TEST_ENV_NAME)
        mock_ami = self._get_image_mock(aws)
        aws.log_metrics = MagicMock()
        aws.update_elb = MagicMock(return_value=None)

        # stub out the network/EC2/autoscaling calls that provision() makes internally
        with patch("disco_aws_automation.DiscoAWS.get_meta_network",
                   return_value=_get_meta_network_mock()), \
             patch("boto.ec2.connection.EC2Connection.get_all_snapshots", return_value=[]), \
             patch("disco_aws_automation.DiscoAWS.create_scaling_schedule", return_value=None), \
             patch("boto.ec2.autoscale.AutoScaleConnection.create_or_update_tags", return_value=None):
            metadata = aws.provision(ami=mock_ami, hostclass="mhcunittest", owner="unittestuser",
                                     min_size=1, desired_size=1, max_size=1)

        # the config-driven chaos flag should surface in provision()'s metadata
        self.assertEqual(metadata["hostclass"], "mhcunittest")
        self.assertFalse(metadata["no_destroy"])
        self.assertTrue(metadata["chaos"])
        # launch configuration: name pattern and AMI should match what we provisioned
        launch_config = aws.autoscale.get_configs()[0]
        self.assertRegexpMatches(launch_config.name, r".*_mhcunittest_[0-9]*")
        self.assertEqual(launch_config.image_id, mock_ami.id)
        self.assertTrue(aws.autoscale.has_group("mhcunittest"))
        # autoscaling group: env-qualified name and the requested sizing
        scaling_group = aws.autoscale.get_groups()[0]
        self.assertEqual(scaling_group.name, "unittestenv_mhcunittest")
        self.assertEqual(scaling_group.min_size, 1)
        self.assertEqual(scaling_group.max_size, 1)
        self.assertEqual(scaling_group.desired_capacity, 1)
Ejemplo n.º 12
0
    def test_get_alarm_config_log_pattern_metric(self):
        """Test DiscoAlarmsConfig get_alarms for log pattern metrics"""
        disco_alarms_config = DiscoAlarmsConfig(ENVIRONMENT)
        disco_alarms_config.config = get_mock_config({
            'reporting.LogMetrics.ErrorCount.mhcrasberi': {
                'log_pattern_metric': 'true',
                'threshold_max': '1',
                'duration': '60',
                'period': '5',
                'statistic': 'average',
                'custom_metric': 'false',
                'level': 'critical'
            }
        })

        alarm_configs = disco_alarms_config.get_alarms('mhcrasberi')
        self.assertEqual(1, len(alarm_configs))
        # log pattern metrics get an environment-scoped namespace and a
        # hostclass-prefixed metric name
        # (assertEquals is a deprecated alias; assertEqual is the supported name)
        self.assertEqual('LogMetrics/' + ENVIRONMENT,
                         alarm_configs[0].namespace)
        self.assertEqual('mhcrasberi-ErrorCount',
                         alarm_configs[0].metric_name)
Ejemplo n.º 13
0
 def setUp(self):
     """Build a DiscoChaos at 25% level / 30% retainage over a mocked DiscoAWS."""
     mock_config = get_mock_config(get_default_config_dict())
     self.chaos = DiscoChaos(config=mock_config,
                             environment_name=TEST_ENV_NAME,
                             level=25.0,
                             retainage=30.0)
     # autospec keeps the DiscoAWS interface honest while avoiding real AWS calls
     self.chaos._disco_aws = create_autospec(DiscoAWS)
Ejemplo n.º 14
0
    def setUp(self):
        """Set up a DiscoElastiCache with mocked VPC/AWS/Route53 and a stubbed connection."""
        self.elasticache = DiscoElastiCache(
            vpc=_get_mock_vpc(), aws=_get_mock_aws(), route53=_get_mock_route53())
        self.elasticache.route53 = MagicMock()

        # patch the config property on the instance's type so attribute reads hit the mock
        type(self.elasticache).config = PropertyMock(return_value=get_mock_config({
            'unittest:new-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true'
            },
            'unittest:old-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true'
            }
        }))

        self.elasticache.conn = MagicMock()

        # in-memory stand-in for ElastiCache replication groups, keyed by id;
        # 'unittest2-cache' deliberately has no 'NodeGroups' entry
        self.replication_groups = {
            'unittest-old-cache': {
                'ReplicationGroupId': 'unittest-old-cache',
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'old-cache.example.com'
                    }
                }]
            },
            'unittest-cache2': {
                'ReplicationGroupId': 'unittest-cache2',
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'cache2.example.com'
                    }
                }]
            },
            'unittest2-cache': {
                'ReplicationGroupId': 'unittest2-cache'
            }
        }

        def _create_replication_group(**kwargs):
            # record the created group so subsequent describe calls can see it
            self.replication_groups[kwargs['ReplicationGroupId']] = {
                'ReplicationGroupId': kwargs['ReplicationGroupId'],
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'foo.example.com'
                    }
                }]
            }

        # pylint doesn't like Boto3's argument names
        # pylint: disable=C0103
        def _describe_replication_groups(ReplicationGroupId=None):
            # membership test on the dict directly instead of .keys() (same semantics)
            if ReplicationGroupId in self.replication_groups:
                return {
                    'ReplicationGroups': [self.replication_groups[ReplicationGroupId]]
                }
            elif ReplicationGroupId is None:
                return {
                    # materialize to a list so callers can index/len the result on
                    # Python 3, where dict.values() is a lazy view
                    'ReplicationGroups': list(self.replication_groups.values())
                }

        # pylint: disable=C0103
        def _describe_cache_subnet_groups(CacheSubnetGroupName=None):
            if CacheSubnetGroupName:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName': 'unittest-intranet'
                    }]
                }
            elif CacheSubnetGroupName is None:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName': 'unittest-intranet'
                    }, {
                        'CacheSubnetGroupName': 'unittest-build'
                    }]
                }

        self.elasticache.conn.describe_replication_groups.side_effect = _describe_replication_groups
        self.elasticache.conn.describe_cache_subnet_groups.side_effect = _describe_cache_subnet_groups
        self.elasticache.conn.create_replication_group.side_effect = _create_replication_group
Ejemplo n.º 15
0
    def setUp(self):
        """Set up a DiscoElastiCache with mocked VPC/AWS/Route53 and a stubbed connection."""
        self.elasticache = DiscoElastiCache(vpc=_get_mock_vpc(),
                                            aws=_get_mock_aws(),
                                            route53=_get_mock_route53())
        self.elasticache.route53 = MagicMock()

        # NOTE(review): patching the property on the class itself leaks into other
        # tests unless torn down — confirm there is a corresponding tearDown
        DiscoElastiCache.config = PropertyMock(return_value=get_mock_config({
            'unittest:new-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true'
            },
            'unittest:old-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true'
            }
        }))

        self.elasticache.conn = MagicMock()

        # in-memory stand-in for ElastiCache replication groups; the last entry
        # deliberately has no 'NodeGroups' key
        self.replication_groups = [{
            'ReplicationGroupId':
            self.elasticache._get_redis_replication_group_id('old-cache'),
            'Description':
            'unittest-old-cache',
            'Status':
            'available',
            'NodeGroups': [{
                'PrimaryEndpoint': {
                    'Address': 'old-cache.example.com'
                }
            }]
        }, {
            'ReplicationGroupId':
            self.elasticache._get_redis_replication_group_id('cache2'),
            'Description':
            'unittest-cache2',
            'Status':
            'available',
            'NodeGroups': [{
                'PrimaryEndpoint': {
                    'Address': 'cache2.example.com'
                }
            }]
        }, {
            'ReplicationGroupId':
            self.elasticache._get_redis_replication_group_id('cache'),
            'Description':
            'unittest2-cache',
            'Status':
            'available'
        }]

        def _create_replication_group(**kwargs):
            # record the created group so subsequent describe calls can see it
            self.replication_groups.append({
                'ReplicationGroupId':
                kwargs['ReplicationGroupId'],
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'foo.example.com'
                    }
                }]
            })

        # pylint doesn't like Boto3's argument names
        # pylint: disable=C0103
        def _describe_replication_groups(ReplicationGroupId=None):
            # no id -> all groups; otherwise filter by exact id (may be empty)
            if ReplicationGroupId is None:
                return {'ReplicationGroups': self.replication_groups}
            else:
                found_groups = [
                    group for group in self.replication_groups
                    if group['ReplicationGroupId'] == ReplicationGroupId
                ]
                return {'ReplicationGroups': found_groups}

        # pylint: disable=C0103
        def _describe_cache_subnet_groups(CacheSubnetGroupName=None):
            # NOTE(review): a falsy non-None name (e.g. '') falls through both
            # branches and returns None — presumably never exercised; confirm
            if CacheSubnetGroupName:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName':
                        'unittest-intranet'
                    }]
                }
            elif CacheSubnetGroupName is None:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName':
                        'unittest-intranet'
                    }, {
                        'CacheSubnetGroupName':
                        'unittest-build'
                    }]
                }

        self.elasticache.conn.describe_replication_groups.side_effect = _describe_replication_groups
        self.elasticache.conn.describe_cache_subnet_groups.side_effect = _describe_cache_subnet_groups
        self.elasticache.conn.create_replication_group.side_effect = _create_replication_group