def test_apply(self):
    """apply() sets up TF, runs 'terraform apply', shows outputs, taints."""
    with patch('%s._validate' % pb):
        cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._set_remote' % pb, autospec=True) as mock_set:
            with patch('%s._setup_tf' % pb, autospec=True) as mock_setup:
                with patch('%s._run_tf' % pb, autospec=True) as mock_run:
                    with patch(
                        '%s._taint_deployment' % pb, autospec=True
                    ) as mock_taint:
                        mock_run.return_value = 'output'
                        with patch(
                            '%s._show_outputs' % pb, autospec=True
                        ) as mock_show:
                            cls.apply()
    assert mock_setup.mock_calls == [call(cls, stream=False)]
    assert mock_set.mock_calls == []
    assert mock_run.mock_calls == [
        call(cls, 'apply', cmd_args=['-input=false', '-refresh=true', '.'],
             stream=False)
    ]
    assert mock_logger.mock_calls == [
        call.warning('Running terraform apply: %s',
                     '-input=false -refresh=true .'),
        call.warning("Terraform apply finished successfully:\n%s", 'output')
    ]
    assert mock_show.mock_calls == [call(cls)]
    assert mock_taint.mock_calls == [call(cls, stream=False)]
def test_apply_stream(self):
    """apply(stream=True) succeeds even when tainting the deployment raises."""
    def se_exc(*args, **kwargs):
        raise Exception('foo')

    with patch('%s._validate' % pb):
        cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._set_remote' % pb, autospec=True) as mock_set:
            with patch('%s._run_tf' % pb, autospec=True) as mock_run:
                with patch(
                    '%s._taint_deployment' % pb, autospec=True
                ) as mock_taint:
                    mock_run.return_value = 'output'
                    mock_taint.side_effect = se_exc
                    with patch(
                        '%s._show_outputs' % pb, autospec=True
                    ) as mock_show:
                        cls.apply(stream=True)
    assert mock_set.mock_calls == [call(cls, stream=True)]
    assert mock_run.mock_calls == [
        call(cls, 'apply', cmd_args=['-input=false', '-refresh=true', '.'],
             stream=True)
    ]
    assert mock_logger.mock_calls == [
        call.warning('Running terraform apply: %s',
                     '-input=false -refresh=true .'),
        call.warning("Terraform apply finished successfully.")
    ]
    assert mock_show.mock_calls == [call(cls)]
    assert mock_taint.mock_calls == [call(cls, stream=True)]
def test_generate(self):
    """generate() writes the lambda source, zip and TF JSON, logging each."""
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._get_config' % pb, autospec=True) as mock_get:
            with patch('%s.open' % pbm, mock_open(), create=True) as m_open:
                with patch('%s._write_zip' % pb, autospec=True) as mock_zip:
                    mock_get.return_value = 'myjson'
                    self.cls.generate('myfunc')
    assert mock_get.mock_calls == [call(self.cls, 'myfunc')]
    assert m_open.mock_calls == [
        call('./webhook2lambda2sqs_func.py', 'w'),
        call().__enter__(),
        call().write('myfunc'),
        call().__exit__(None, None, None),
        call('./webhook2lambda2sqs.tf.json', 'w'),
        call().__enter__(),
        call().write('myjson'),
        call().__exit__(None, None, None)
    ]
    assert mock_zip.mock_calls == [
        call(self.cls, 'myfunc', './webhook2lambda2sqs_func.zip')
    ]
    assert mock_logger.mock_calls == [
        call.warning('Writing lambda function source to: '
                     './webhook2lambda2sqs_func.py'),
        call.debug('lambda function written'),
        call.warning('Writing lambda function source zip file to: '
                     './webhook2lambda2sqs_func.zip'),
        call.debug('lambda zip written'),
        call.warning('Writing terraform configuration JSON to: '
                     './webhook2lambda2sqs.tf.json'),
        call.debug('terraform configuration written'),
        call.warning('Completed writing lambda function and TF config.')
    ]
def test_fail_always(self):
    """_try_send returns False after exhausting all configured attempts."""
    self.config['splunk_max_attempts'] = 3
    self.config['splunk_hex_max_length'] = None
    with patch('%s.sleep' % pbm) as mock_sleep:
        with patch('%s.uniform' % pbm) as mock_uniform:
            with patch('%s._send_splunk' % pb) as mock_send:
                mock_uniform.return_value = 1.2
                mock_send.side_effect = RuntimeError('foo')
                res = self.cls._try_send({'foo': 'bar'})
    assert res is False
    assert mock_sleep.mock_calls == [call(1.2), call(1.2), call(1.2)]
    assert mock_uniform.mock_calls == [call(1, 4), call(1, 4), call(1, 4)]
    assert mock_send.mock_calls == [
        call('{"foo": "bar"}'),
        call('{"foo": "bar"}'),
        call('{"foo": "bar"}')
    ]
    assert self.mock_logger.mock_calls == [
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.error('ERROR - Could not POST to Splunk after %d tries.', 3)
    ]
def test_run_tls(self):
    """run() with a TLS factory listens via listentls, never listentcp."""
    self.cls.reactor = Mock(spec_set=reactor)
    self.cls.tls_factory = Mock()
    with patch.multiple(
        pbm, logger=DEFAULT, Site=DEFAULT, LoopingCall=DEFAULT,
        VaultRedirectorSite=DEFAULT
    ) as mod_mocks:
        with patch.multiple(
            pb, get_active_node=DEFAULT, run_reactor=DEFAULT,
            listentcp=DEFAULT, add_update_loop=DEFAULT, listentls=DEFAULT
        ) as cls_mocks:
            cls_mocks['get_active_node'].return_value = 'consul:1234'
            self.cls.run()
    assert self.cls.active_node_ip_port == 'consul:1234'
    assert mod_mocks['logger'].mock_calls == [
        call.warning('Initial Vault active node: %s', 'consul:1234'),
        call.warning('Starting Twisted reactor (event loop)')
    ]
    assert mod_mocks['VaultRedirectorSite'].mock_calls == [call(self.cls)]
    assert mod_mocks['Site'].mock_calls == [
        call(mod_mocks['VaultRedirectorSite'].return_value)
    ]
    assert self.cls.reactor.mock_calls == []
    assert cls_mocks['run_reactor'].mock_calls == [call()]
    assert mod_mocks['LoopingCall'].mock_calls == []
    assert cls_mocks['listentls'].mock_calls == [
        call(mod_mocks['Site'].return_value)
    ]
    assert cls_mocks['add_update_loop'].mock_calls == [call()]
    assert cls_mocks['listentcp'].mock_calls == []
def test_none(self):
    """Poll while status cycles, stop when it returns to 'none', fetch result.

    NOTE(review): the original warning literal was split across two mangled
    source lines; it is rejoined here as a single implicitly-concatenated
    string ('...AWS side. Continuing').
    """
    self.cls.refresh_timeout = None
    check_dt = datetime(2016, 12, 16, hour=10, minute=30, second=12,
                        tzinfo=utc)
    now_dt = datetime(2016, 12, 16, hour=11, minute=30, second=12,
                      tzinfo=utc)
    statuses = [
        {'statuses': [{'status': 'none'}]},
        {'statuses': [{'status': 'enqueued'}]},
        {'statuses': [{'status': 'processing'}]},
        {'statuses': [{'status': 'none'}]}
    ]
    m_s = self.mock_conn.describe_trusted_advisor_check_refresh_statuses
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.sleep' % pbm, autospec=True) as mock_sleep:
            with patch('%s._get_check_result' % pb, autospec=True) as gcr:
                with patch('%s.datetime_now' % pbm) as mock_dt_now:
                    mock_dt_now.return_value = now_dt
                    m_s.side_effect = statuses
                    gcr.return_value = ({'foo': 'bar'}, check_dt)
                    res = self.cls._poll_for_refresh('abc123')
    assert res == {'foo': 'bar'}
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123'])
    ]
    assert gcr.mock_calls == [call(self.cls, 'abc123')]
    assert mock_sleep.mock_calls == [call(30), call(30), call(30)]
    assert mock_dt_now.mock_calls == [
        call(), call(), call(), call(), call()
    ]
    assert mock_logger.mock_calls == [
        call.warning('Polling for TA check %s refresh...', 'abc123'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'none'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'enqueued'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'processing'),
        call.debug('Checking refresh status'),
        call.warning('Trusted Advisor check refresh status went '
                     'from "%s" to "%s"; refresh is either complete '
                     'or timed out on AWS side. Continuing',
                     'processing', 'none'),
        call.info('Done polling for check refresh'),
        call.debug('Check shows last refresh time of: %s', check_dt)
    ]
def test_init_nondefault(self):
    """Non-default constructor args are stored; logging-disabled warning."""
    with patch('%s.setup_signal_handlers' % pb) as mock_setup_signals:
        with patch('%s.get_tls_factory' % pb) as mock_get_tls:
            with patch('%s.logger' % pbm) as mock_logger:
                with patch('%s.getpid' % pbm) as mock_getpid:
                    mock_getpid.return_value = 12345
                    cls = VaultRedirector(
                        'consul:123',
                        redir_to_https=True,
                        redir_to_ip=True,
                        log_disable=True,
                        poll_interval=1.234,
                        bind_port=1234,
                        check_id='foo:bar'
                    )
    assert mock_setup_signals.mock_calls == [call()]
    assert mock_logger.mock_calls == [
        call.warning(
            'Starting VaultRedirector with ALL LOGGING DISABLED; send '
            'SIGUSR1 to PID %d enable logging.', 12345
        )
    ]
    assert mock_get_tls.mock_calls == []
    assert cls.active_node_ip_port is None
    assert cls.last_poll_time is None
    assert cls.consul_host_port == 'consul:123'
    assert cls.redir_https is True
    assert cls.redir_ip is True
    assert cls.log_enabled is False
    assert cls.poll_interval == 1.234
    assert cls.bind_port == 1234
    assert cls.check_id == 'foo:bar'
    assert cls.consul_scheme == 'https'
def test_get_limit_check_id_subscription_required(self):
    """A SubscriptionRequiredException disables TA and yields (None, None)."""
    def se_api(language=None):
        response = {
            'ResponseMetadata': {
                'HTTPStatusCode': 400,
                'RequestId': '3cc9b2a8-c6e5-11e5-bc1d-b13dcea36176'
            },
            'Error': {
                'Message': 'AWS Premium Support Subscription is required '
                           'to use this service.',
                'Code': 'SubscriptionRequiredException'
            }
        }
        raise ClientError(response, 'operation')

    assert self.cls.have_ta is True
    self.mock_conn.describe_trusted_advisor_checks.side_effect = se_api
    with patch('awslimitchecker.trustedadvisor'
               '.logger', autospec=True) as mock_logger:
        res = self.cls._get_limit_check_id()
    assert self.cls.have_ta is False
    assert res == (None, None)
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_checks(language='en')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Querying Trusted Advisor checks"),
        call.warning("Cannot check TrustedAdvisor: %s",
                     'AWS Premium Support Subscription is required to '
                     'use this service.')
    ]
def test_load_hard_coded_settings(self, mock_messagebar, mock_join,
                                  mock_hardcoded_template):
    """Fall back to the hard coded default template when none is stored."""
    self.midvatten.ms.settingsdict['secplot_loaded_template'] = ''
    self.midvatten.ms.settingsdict['secplot_templates'] = ''
    mock_join.return_value = ''
    test_dict = {"hardcoded": 1}
    mock_hardcoded_template.return_value = test_dict
    secplottemplates = PlotTemplates(
        self.sectionplot, self.template_list, self.edit_button,
        self.load_button, self.save_as_button, self.import_button,
        self.remove_button, self.template_folder, 'secplot_templates',
        'secplot_loaded_template', defs.secplot_default_template(),
        self.midvatten.ms)
    test = utils.anything_to_string_representation(
        secplottemplates.loaded_template)
    reference = utils.anything_to_string_representation(test_dict)
    assert call.warning(
        bar_msg=
        'Default template not found, loading hard coded default template.'
    ) in mock_messagebar.mock_calls
    assert call.info(
        log_msg='Loaded template from default hard coded template.'
    ) in mock_messagebar.mock_calls
    assert test == reference
def test_init_no_sensors(self):
    """SensorDaemon exits with code 1 when discovery finds no sensors."""
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as mocks:
            with patch('%s._list_classes' % pbm, autospec=True) as mock_list:
                mocks['find_host_id'].return_value = 'myhostid'
                mocks['discover_engine'].return_value = ('foo.bar.baz', 1234)
                with pytest.raises(SystemExit) as excinfo:
                    SensorDaemon()
    assert mock_logger.mock_calls == [
        call.warning('This machine running with host_id %s', 'myhostid'),
        call.critical('ERROR - no sensors discovered.')
    ]
    assert mocks['find_host_id'].call_count == 1
    assert mocks['discover_engine'].call_count == 1
    assert mocks['discover_sensors'].call_count == 1
    assert excinfo.value.code == 1
    assert mock_list.mock_calls == []
def test_update_limits_from_api_invalid_region_503(self):
    """A 503 ClientError from GetSendQuota is logged and the limit left unset."""
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 503,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Service Unavailable',
            'Code': '503'
        }
    }
    ce = ClientError(resp, 'GetSendQuota')

    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43, {}, None)
            cls.conn = mock_conn
            cls._update_limits_from_api()
    assert mock_connect.mock_calls == [call()]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert mock_logger.mock_calls == [call.warning('Skipping SES: %s', ce)]
    assert cls.limits['Daily sending quota'].api_limit is None
def test_unknown_but_tolerated_category(self, val):
    """An unknown category value is accepted but logged as a warning."""
    with patch('n6lib.auth_db.fields.LOGGER') as LOGGER_mock:
        self._test_proper_values(CriteriaCategory, {'category': val},
                                 expecting_stripped_string=True)
    self.assertEqual(LOGGER_mock.mock_calls, [
        call.warning(CategoryCustomizedField.warning_msg_template,
                     val.strip()),
    ])
def test_init_default(self):
    """Default constructor wires discovery results into attributes."""
    sensors = [Mock(), Mock()]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as mocks:
            with patch('%s._list_classes' % pbm, autospec=True) as mock_list:
                mocks['find_host_id'].return_value = 'myhostid'
                mocks['discover_engine'].return_value = ('foo.bar.baz', 1234)
                mocks['discover_sensors'].return_value = sensors
                cls = SensorDaemon()
    assert cls.dry_run is False
    assert cls.dummy_data is False
    assert cls.engine_port == 1234
    assert cls.engine_addr == 'foo.bar.baz'
    assert cls.interval == 60.0
    assert cls.host_id == 'myhostid'
    assert cls.sensors == sensors
    assert mock_logger.mock_calls == [
        call.warning('This machine running with host_id %s', 'myhostid')
    ]
    assert mocks['find_host_id'].mock_calls == [call(cls)]
    assert mocks['discover_engine'].mock_calls == [call(cls)]
    assert mocks['discover_sensors'].mock_calls == [call(cls, {})]
    assert mock_list.mock_calls == []
def test_mode_int_within_threshold(self):
    """A check newer than the integer refresh threshold is not re-polled."""
    self.cls.refresh_mode = 120  # 2 minutes
    check_dt = datetime(2016, 12, 16, hour=10, minute=40, second=12,
                        tzinfo=utc)
    with patch('%s._get_check_result' % pb, autospec=True) as mock_gcr:
        with patch('%s._can_refresh_check' % pb, autospec=True) as mock_crc:
            with patch('%s.logger' % pbm, autospec=True) as mock_logger:
                with patch('%s._poll_for_refresh' % pb,
                           autospec=True) as mock_pfr:
                    mock_gcr.return_value = ({'mock': 'gcr'}, check_dt)
                    mock_pfr.return_value = {'mock': 'pfr'}
                    mock_crc.return_value = True
                    res = self.cls._get_refreshed_check_result('abc123')
    assert res == {'mock': 'gcr'}
    assert mock_gcr.mock_calls == [
        call(self.cls, 'abc123'),
        call(self.cls, 'abc123')
    ]
    assert mock_crc.mock_calls == [call(self.cls, 'abc123')]
    assert mock_pfr.mock_calls == []
    assert mock_logger.mock_calls == [
        call.debug('Handling refresh of check: %s', 'abc123'),
        call.debug('ta_refresh_mode older; check last refresh: %s; '
                   'threshold=%d seconds', check_dt, 120),
        call.warning('Trusted Advisor check %s last refresh time of %s '
                     'is newer than refresh threshold of %d seconds.',
                     'abc123',
                     datetime(2016, 12, 16, 10, 40, 12, tzinfo=utc),
                     120)
    ]
def test_get_limit_check_id_subscription_required(self):
    """SubscriptionRequiredException disables TA; result is (None, None)."""
    def se_api(language=None):
        response = {
            'ResponseMetadata': {
                'HTTPStatusCode': 400,
                'RequestId': '3cc9b2a8-c6e5-11e5-bc1d-b13dcea36176'
            },
            'Error': {
                'Message': 'AWS Premium Support Subscription is required '
                           'to use this service.',
                'Code': 'SubscriptionRequiredException'
            }
        }
        raise ClientError(response, 'operation')

    assert self.cls.have_ta is True
    self.mock_conn.describe_trusted_advisor_checks.side_effect = se_api
    with patch('awslimitchecker.trustedadvisor'
               '.logger', autospec=True) as mock_logger:
        res = self.cls._get_limit_check_id()
    assert self.cls.have_ta is False
    assert res == (None, None)
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_checks(language='en')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Querying Trusted Advisor checks"),
        call.warning(
            "Cannot check TrustedAdvisor: %s",
            'AWS Premium Support Subscription is required to '
            'use this service.')
    ]
def test_find_usage_invalid_region_503(self):
    """find_usage() logs and skips SES when the API returns a 503."""
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 503,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Service Unavailable',
            'Code': '503'
        }
    }
    ce = ClientError(resp, 'GetSendQuota')

    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43)
            cls.conn = mock_conn
            assert cls._have_usage is False
            cls.find_usage()
    assert mock_connect.mock_calls == [call()]
    assert cls._have_usage is False
    assert mock_logger.mock_calls == [
        call.debug('Checking usage for service %s', 'SES'),
        call.warning(
            'Skipping SES: %s', ce
        )
    ]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert len(cls.limits['Daily sending quota'].get_current_usage()) == 0
def test_update_limits_from_api_invalid_region_503(self):
    """A 503 from GetSendQuota is logged; the API limit stays None."""
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 503,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Service Unavailable',
            'Code': '503'
        }
    }
    ce = ClientError(resp, 'GetSendQuota')

    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43)
            cls.conn = mock_conn
            cls._update_limits_from_api()
    assert mock_connect.mock_calls == [call()]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert mock_logger.mock_calls == [
        call.warning('Skipping SES: %s', ce)
    ]
    assert cls.limits['Daily sending quota'].api_limit is None
def test_find_usage_invalid_region_503(self):
    """find_usage() (4-arg service ctor) logs and skips SES on a 503."""
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 503,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Service Unavailable',
            'Code': '503'
        }
    }
    ce = ClientError(resp, 'GetSendQuota')

    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43, {}, None)
            cls.conn = mock_conn
            assert cls._have_usage is False
            cls.find_usage()
    assert mock_connect.mock_calls == [call()]
    assert cls._have_usage is False
    assert mock_logger.mock_calls == [
        call.debug('Checking usage for service %s', 'SES'),
        call.warning('Skipping SES: %s', ce)
    ]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert len(cls.limits['Daily sending quota'].get_current_usage()) == 0
def test_get_limit_check_id_subscription_required(self):
    """A JSONResponseError for a missing subscription disables TA."""
    def se_api(language):
        status = 400
        reason = 'Bad Request'
        body = {
            'message': 'AWS Premium Support Subscription is required to '
                       'use this service.',
            '__type': 'SubscriptionRequiredException'
        }
        raise JSONResponseError(status, reason, body)

    self.mock_conn.describe_trusted_advisor_checks.side_effect = se_api
    assert self.cls.have_ta is True
    with patch('awslimitchecker.trustedadvisor'
               '.logger', autospec=True) as mock_logger:
        res = self.cls._get_limit_check_id()
    assert self.cls.have_ta is False
    assert res == (None, None)
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_checks('en')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Querying Trusted Advisor checks"),
        call.warning("Cannot check TrustedAdvisor: %s",
                     "AWS Premium Support "
                     "Subscription is required to use this service.")
    ]
def test_init_default(self):
    """Defaults: dry_run/dummy_data off, discovery results stored."""
    sensors = [Mock(), Mock()]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as mocks:
            with patch('%s._list_classes' % pbm, autospec=True) as mock_list:
                mocks['find_host_id'].return_value = 'myhostid'
                mocks['discover_engine'].return_value = (
                    'foo.bar.baz', 1234
                )
                mocks['discover_sensors'].return_value = sensors
                cls = SensorDaemon()
    assert cls.dry_run is False
    assert cls.dummy_data is False
    assert cls.engine_port == 1234
    assert cls.engine_addr == 'foo.bar.baz'
    assert cls.interval == 60.0
    assert cls.host_id == 'myhostid'
    assert cls.sensors == sensors
    assert mock_logger.mock_calls == [
        call.warning('This machine running with host_id %s', 'myhostid')
    ]
    assert mocks['find_host_id'].mock_calls == [call(cls)]
    assert mocks['discover_engine'].mock_calls == [call(cls)]
    assert mocks['discover_sensors'].mock_calls == [call(cls, {})]
    assert mock_list.mock_calls == []
def test_init_no_sensors(self):
    """With no discovered sensors, SensorDaemon logs critical and exits 1."""
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as mocks:
            with patch('%s._list_classes' % pbm, autospec=True) as mock_list:
                mocks['find_host_id'].return_value = 'myhostid'
                mocks['discover_engine'].return_value = ('foo.bar.baz', 1234)
                with pytest.raises(SystemExit) as excinfo:
                    SensorDaemon()
    assert mock_logger.mock_calls == [
        call.warning('This machine running with host_id %s', 'myhostid'),
        call.critical('ERROR - no sensors discovered.')
    ]
    assert mocks['find_host_id'].call_count == 1
    assert mocks['discover_engine'].call_count == 1
    assert mocks['discover_sensors'].call_count == 1
    assert excinfo.value.code == 1
    assert mock_list.mock_calls == []
def test_show_one_queue_empty(self, capsys):
    """An empty queue prints a notice and deletes nothing."""
    conn = Mock()
    conn.receive_message.return_value = {}
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._url_for_queue' % pb, autospec=True) as mock_url:
            mock_url.return_value = 'myurl'
            with patch('%s._delete_msg' % pb, autospec=True) as mock_del:
                self.cls._show_one_queue(conn, 'foo', 1, delete=True)
    out, err = capsys.readouterr()
    assert err == ''
    expected_out = "=> Queue 'foo' appears empty.\n"
    assert out == expected_out
    assert mock_del.mock_calls == []
    assert conn.mock_calls == [
        call.receive_message(
            QueueUrl='myurl',
            AttributeNames=['All'],
            MessageAttributeNames=['All'],
            MaxNumberOfMessages=1,
            WaitTimeSeconds=20
        )
    ]
    assert mock_url.mock_calls == [
        call(self.cls, conn, 'foo')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Queue '%s' url: %s", 'foo', 'myurl'),
        call.warning("Receiving %d messages from queue'%s'; this may "
                     "take up to 20 seconds.", 1, 'foo'),
        call.debug('received no messages')
    ]
def test_init_nondefault(self):
    """All non-default args are honored; logging-disabled warning emitted."""
    with patch('%s.setup_signal_handlers' % pb) as mock_setup_signals:
        with patch('%s.get_tls_factory' % pb) as mock_get_tls:
            with patch('%s.logger' % pbm) as mock_logger:
                with patch('%s.getpid' % pbm) as mock_getpid:
                    mock_getpid.return_value = 12345
                    cls = VaultRedirector('consul:123',
                                          redir_to_https=True,
                                          redir_to_ip=True,
                                          log_disable=True,
                                          poll_interval=1.234,
                                          bind_port=1234,
                                          check_id='foo:bar')
    assert mock_setup_signals.mock_calls == [call()]
    assert mock_logger.mock_calls == [
        call.warning(
            'Starting VaultRedirector with ALL LOGGING DISABLED; send '
            'SIGUSR1 to PID %d enable logging.', 12345)
    ]
    assert mock_get_tls.mock_calls == []
    assert cls.active_node_ip_port is None
    assert cls.last_poll_time is None
    assert cls.consul_host_port == 'consul:123'
    assert cls.redir_https is True
    assert cls.redir_ip is True
    assert cls.log_enabled is False
    assert cls.poll_interval == 1.234
    assert cls.bind_port == 1234
    assert cls.check_id == 'foo:bar'
    assert cls.consul_scheme == 'https'
def test_find_usage_spot_instances(self):
    """Spot request usage counts only open/active requests (2 of 4).

    NOTE(review): 'may not me accurate' is a typo carried in the production
    log string; the assertion must match it byte-for-byte.
    """
    data = fixtures.test_find_usage_spot_instances
    mock_conn = Mock()
    mock_client_conn = Mock()
    mock_client_conn.describe_spot_instance_requests.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.conn = mock_client_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_spot_instances()
    assert mock_conn.mock_calls == []
    assert mock_client_conn.mock_calls == [
        call.describe_spot_instance_requests()
    ]
    lim = cls.limits['Max spot instance requests per region']
    usage = lim.get_current_usage()
    assert len(usage) == 1
    assert usage[0].get_value() == 2
    assert mock_logger.mock_calls == [
        call.debug('Getting spot instance request usage'),
        call.warning('EC2 spot instance support is experimental and '
                     'results may not me accurate in all cases. Please '
                     'see the notes at: <http://awslimitchecker'
                     '.readthedocs.io/en/latest/limits.html#ec2>'),
        call.debug('NOT counting spot instance request %s state=%s',
                   'reqID1', 'closed'),
        call.debug('Counting spot instance request %s state=%s',
                   'reqID2', 'active'),
        call.debug('Counting spot instance request %s state=%s',
                   'reqID3', 'open'),
        call.debug('NOT counting spot instance request %s state=%s',
                   'reqID4', 'failed'),
        call.debug('Setting "Max spot instance requests per region" '
                   'limit (%s) current usage to: %d', lim, 2)
    ]
def test_get_limit_check_id_subscription_required(self):
    """boto_query_wrapper raising SubscriptionRequired disables TA."""
    def se_api(foo, language):
        status = 400
        reason = 'Bad Request'
        body = {
            'message': 'AWS Premium Support Subscription is required to '
                       'use this service.',
            '__type': 'SubscriptionRequiredException'
        }
        raise JSONResponseError(status, reason, body)

    assert self.cls.have_ta is True
    with patch('awslimitchecker.trustedadvisor'
               '.logger', autospec=True) as mock_logger:
        with patch('%s.boto_query_wrapper' % pbm) as mock_wrapper:
            mock_wrapper.side_effect = se_api
            res = self.cls._get_limit_check_id()
    assert self.cls.have_ta is False
    assert res == (None, None)
    assert self.mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [
        call(self.mock_conn.describe_trusted_advisor_checks, 'en')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Querying Trusted Advisor checks"),
        call.warning(
            "Cannot check TrustedAdvisor: %s",
            "AWS Premium Support "
            "Subscription is required to use this service.")
    ]
def test_should_log_as_warning_instances_with_no_name_set(logger):
    """An instance without a Name tag is reported via logger.warning."""
    id1 = create_instance('web_i1', add_name=False)
    expected_calls = [
        call.warning('Instance {} has no tag Name set'.format(id1)),
    ]
    instances = get_running_instances_hostnames('*web*')
    logger.warning.assert_has_calls(expected_calls)
def test_delete_application_group_dry_run(self, mock_post, mock_get,
                                          mock_requests, mock_logger):
    """Dry-run delete only logs what would be done; no HTTP calls happen."""
    groups = Groups(self.dummy_ice_url, dry_run=True)
    mock_logger.reset_mock()
    groups.delete_application_group('foo')
    self.assertEquals(
        mock_logger.mock_calls,
        [call.warning('Would GET deleteApplicationGroup?name=foo')]
    )
    self.assertEquals(mock_get.mock_calls, [])
    self.assertEquals(mock_post.mock_calls, [])
def test_plan(self):
    """plan() sets remote state, runs 'terraform plan' and logs output."""
    with patch('%s._validate' % pb):
        cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._set_remote' % pb, autospec=True) as mock_set:
            with patch('%s._run_tf' % pb, autospec=True) as mock_run:
                mock_run.return_value = 'output'
                cls.plan()
    assert mock_set.mock_calls == [call(cls, stream=False)]
    assert mock_run.mock_calls == [
        call(cls, 'plan', cmd_args=['-input=false', '-refresh=true', '.'],
             stream=False)
    ]
    assert mock_logger.mock_calls == [
        call.warning('Running terraform plan: %s',
                     '-input=false -refresh=true .'),
        call.warning("Terraform plan finished successfully:\n%s", 'output')
    ]
def test_destroy_stream(self):
    """destroy(stream=True) streams output; success log has no output arg."""
    with patch('%s._validate' % pb):
        cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._set_remote' % pb, autospec=True) as mock_set:
            with patch('%s._run_tf' % pb, autospec=True) as mock_run:
                mock_run.return_value = 'output'
                cls.destroy(stream=True)
    assert mock_set.mock_calls == [call(cls, stream=True)]
    assert mock_run.mock_calls == [
        call(cls, 'destroy', cmd_args=['-refresh=true', '-force', '.'],
             stream=True)
    ]
    assert mock_logger.mock_calls == [
        call.warning('Running terraform destroy: %s',
                     '-refresh=true -force .'),
        call.warning("Terraform destroy finished successfully.")
    ]
def test_init_dry_run(self, mock_post, mock_get, mock_requests,
                      mock_logger):
    """Constructing with dry_run=True warns and performs no requests."""
    g = Groups(self.dummy_ice_url, dry_run=True)
    self.assertEquals(g.dry_run, True)
    self.assertEquals(
        mock_logger.mock_calls,
        [call.warning('DRY RUN only - will not make any changes')]
    )
    self.assertEquals(mock_requests.mock_calls, [])
    self.assertEquals(mock_get.mock_calls, [])
    self.assertEquals(mock_post.mock_calls, [])
def test_listentcp(self):
    """listentcp() warns and binds the site via reactor.listenTCP."""
    self.cls.reactor = Mock(spec_set=reactor)
    mock_site = Mock()
    with patch('%s.logger' % pbm) as mock_logger:
        self.cls.listentcp(mock_site)
    assert mock_logger.mock_calls == [
        call.warning('Setting TCP listener on port %d for HTTP requests',
                     8080)
    ]
    assert self.cls.reactor.mock_calls == [call.listenTCP(8080, mock_site)]
def test_ice_post_dry_run(self, mock_requests, mock_logger):
    """_ice_post in dry-run mode logs the would-be POST, sends nothing."""
    url = 'http://foo.com/dashboard/foobar'
    g = Groups('http://foo.com/', dry_run=True)
    mock_logger.reset_mock()
    res = g._ice_post('foobar', {'baz': 'blam'})
    self.assertEquals(
        mock_logger.mock_calls,
        [call.warning(
            "DRY RUN: Would POST to http://foo.com/dashboard/foobar: "
            "{'baz': 'blam'}")]
    )
    self.assertEquals(mock_requests.mock_calls, [])
def test_warn_about_view_obs_lines_missing(self, mock_messagebar,
                                           mock_latest_version):
    """Dropping view_obs_lines triggers the old-database warning bar."""
    mock_latest_version.return_value = '0.0.1'
    db_utils.sql_alter_db('''DROP VIEW view_obs_lines;''')
    utils.warn_about_old_database()
    print(str(mock_messagebar.mock_calls))
    assert call.warning(
        bar_msg=
        'Database is missing view_obs_points or view_obs_lines! Add these using Midvatten>Database Management>Add view_obs_points as workaround for qgis bug #20633.',
        duration=60) in mock_messagebar.mock_calls
def test_taint_deployment_stream(self):
    """_taint_deployment(stream=True) taints the API gateway deployment."""
    with patch('%s._validate' % pb):
        cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._set_remote' % pb, autospec=True) as mock_set:
            with patch('%s._run_tf' % pb, autospec=True) as mock_run:
                mock_run.return_value = 'output'
                cls._taint_deployment(stream=True)
    assert mock_set.mock_calls == []
    assert mock_run.mock_calls == [
        call(cls, 'taint', cmd_args=['aws_api_gateway_deployment.depl'],
             stream=True)
    ]
    assert mock_logger.mock_calls == [
        call.warning('Running terraform taint: %s as workaround for '
                     '<https://github.com/hashicorp/terraform/issues/6613>',
                     'aws_api_gateway_deployment.depl'),
        call.warning("Terraform taint finished successfully.")
    ]
def test_init_nondefault(self):
    """Non-default ctor args are stored; engine discovery is skipped."""
    dummy = Mock()
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as mocks:
            with patch('%s._list_classes' % pbm, autospec=True) as mock_list:
                mocks['find_host_id'].return_value = 'myhostid'
                mocks['discover_engine'].return_value = (
                    'foo.bar.baz', 1234
                )
                mocks['discover_sensors'].return_value = [dummy]
                cls = SensorDaemon(
                    dry_run=True,
                    dummy_data=True,
                    engine_port=1234,
                    engine_addr='foo.bar.baz',
                    interval=12.34,
                    class_args={'foo': 'bar'}
                )
    assert cls.dry_run is True
    assert cls.dummy_data is True
    assert cls.engine_port == 1234
    assert cls.engine_addr == 'foo.bar.baz'
    assert cls.interval == 12.34
    assert cls.host_id == 'myhostid'
    assert cls.sensors == [dummy]
    assert mock_logger.mock_calls == [
        call.warning('This machine running with host_id %s', 'myhostid'),
        call.warning("DRY RUN MODE - will not POST data to Engine.")
    ]
    assert mocks['find_host_id'].mock_calls == [call(cls)]
    assert mocks['discover_engine'].mock_calls == []
    assert mocks['discover_sensors'].mock_calls == [
        call(cls, {'foo': 'bar'})
    ]
    assert mock_list.mock_calls == []
def test_handle_logging_signal_USR2(self):
    """SIGUSR2 disables logging and warns how to re-enable it."""
    self.cls.log_enabled = True
    with patch('%s.logger' % pbm) as mock_logger:
        with patch('%s.getpid' % pbm) as mock_getpid:
            mock_getpid.return_value = 12345
            self.cls.handle_logging_signal(signal.SIGUSR2, None)
    assert mock_logger.mock_calls == [
        call.warning('Logging disabled via signal; send SIGUSR1 to PID %d '
                     'to enable logging', 12345)
    ]
    assert self.cls.log_enabled is False
def test_save_json(self):
    """save_json rejects empty content and bad paths, accepts valid writes."""
    logger = Mock()
    self.assertFalse(self.reddalert.save_json('/tmp', {}, logger))
    self.assertFalse(
        self.reddalert.save_json('/tmp' * 100, {'foo': 'bar'}, logger))
    self.assertTrue(
        self.reddalert.save_json('/tmp/reddalert_test.tmp',
                                 self.test_json_data, logger))
    self.assertEqual(logger.mock_calls, [
        call.warning('Got empty JSON content, not updating status file!'),
        call.error("Failed to write file '%s'", '/tmp' * 100)
    ])
def test_init_logger(self):
    """Ensure the AGPL license message is logged at initialization."""
    expected = [
        call.warning(
            "awslimitchecker %s is AGPL-licensed free software; "
            "all users have a right to the full source code of "
            "this version. See <%s>",
            "1.2.3@mytag",
            "http://myurl",
        )
    ]
    assert self.mock_logger.mock_calls == expected
def test_random_fallback(self):
    """random_fallback returns the UUID hex and logs a warning."""
    mock_uuid = Mock(hex='1234abcd')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.uuid.uuid4' % pbm, autospec=True) as mock_uuid4:
            mock_uuid4.return_value = mock_uuid
            res = self.cls.random_fallback()
    assert res == '1234abcd'
    assert mock_uuid4.mock_calls == [call()]
    assert mock_logger.mock_calls == [
        call.warning('Could not determine system ID with any concrete '
                     'method; using a random UUID.')
    ]
def test_add_update_loop(self):
    """add_update_loop wires a LoopingCall at the configured poll interval."""
    self.cls.reactor = Mock(spec_set=reactor)
    with patch('%s.LoopingCall' % pbm) as m_looping, \
            patch('%s.logger' % pbm) as m_logger:
        self.cls.add_update_loop()
        assert m_logger.mock_calls == [
            call.warning('Setting Consul poll interval to %s seconds', 5.0)
        ]
        # loop must target update_active_node and be started at 5.0s
        assert m_looping.mock_calls == [
            call(self.cls.update_active_node),
            call().start(5.0)
        ]
def test_upper_level(self, logger_obj, handler_cls):
    """QuietLogger raises the handler level above WARNING, then restores it."""
    fake_handler = Mock()
    fake_handler.configure_mock(level=logging.INFO)
    handler_cls.return_value = fake_handler
    with log_helpers.QuietLogger(logging.WARNING):
        log_helpers.logger.warning('Test')
    # level bumped to WARNING+1 on entry, restored to INFO on exit
    fake_handler.assert_has_calls(
        (call.setLevel(logging.WARNING + 1), call.setLevel(logging.INFO)))
    logger_obj.assert_has_calls((call.warning('Test'),))
def test_handle_logging_signal_USR2(self):
    """SIGUSR2 disables logging; the warning names the re-enable signal/PID."""
    self.cls.log_enabled = True
    with patch('%s.logger' % pbm) as m_logger:
        with patch('%s.getpid' % pbm) as m_getpid:
            m_getpid.return_value = 12345
            self.cls.handle_logging_signal(signal.SIGUSR2, None)
            expected = call.warning(
                'Logging disabled via signal; send SIGUSR1 to PID %d '
                'to enable logging', 12345)
            assert m_logger.mock_calls == [expected]
            assert self.cls.log_enabled is False
def test_handle_change_off_no_write(self):
    """With write_files off, handle_change only logs; no file is created."""
    self.cls.write_files = False
    expected_path = '/foo/bar/pinevent_123.4567_pin2_state0'
    type(self.cls.config).QUEUE_PATH = '/foo/bar'
    with patch('%s.logger' % pbm) as m_logger, \
            patch('%s.open' % pbm, mock_open(read_data='')) as m_open, \
            patch('%s.os.utime' % pbm) as m_utime:
        self.cls.handle_change(2, 0, 123.4567)
        assert m_logger.mock_calls == [
            call.warning("Would create event file: %s", expected_path)
        ]
        # neither open() nor utime() may be touched in dry mode
        assert m_open.mock_calls == []
        assert m_utime.mock_calls == []
def test_init_nondefault(self):
    """Non-default constructor args are stored; engine discovery is skipped."""
    sensor = Mock()
    with patch('%s.logger' % pbm, autospec=True) as m_logger:
        with patch.multiple(
            pb,
            autospec=True,
            find_host_id=DEFAULT,
            discover_engine=DEFAULT,
            discover_sensors=DEFAULT,
        ) as m_meths:
            with patch('%s._list_classes' % pbm, autospec=True) as m_list:
                m_meths['find_host_id'].return_value = 'myhostid'
                m_meths['discover_engine'].return_value = ('foo.bar.baz', 1234)
                m_meths['discover_sensors'].return_value = [sensor]
                daemon = SensorDaemon(dry_run=True,
                                      dummy_data=True,
                                      engine_port=1234,
                                      engine_addr='foo.bar.baz',
                                      interval=12.34,
                                      class_args={'foo': 'bar'})
                assert daemon.dry_run is True
                assert daemon.dummy_data is True
                assert daemon.engine_port == 1234
                assert daemon.engine_addr == 'foo.bar.baz'
                assert daemon.interval == 12.34
                assert daemon.host_id == 'myhostid'
                assert daemon.sensors == [sensor]
                assert m_logger.mock_calls == [
                    call.warning(
                        'This machine running with host_id %s', 'myhostid'),
                    call.warning(
                        "DRY RUN MODE - will not POST data to Engine.")
                ]
                assert m_meths['find_host_id'].mock_calls == [call(daemon)]
                # explicit addr/port -> no discovery call
                assert m_meths['discover_engine'].mock_calls == []
                assert m_meths['discover_sensors'].mock_calls == [
                    call(daemon, {'foo': 'bar'})
                ]
                assert m_list.mock_calls == []
def test_listentls(self):
    """listentls logs and registers an SSL listener on port 8080."""
    self.cls.tls_factory = Mock()
    self.cls.reactor = Mock(spec_set=reactor)
    site = Mock()
    with patch('%s.logger' % pbm) as m_logger:
        self.cls.listentls(site)
        assert m_logger.mock_calls == [
            call.warning(
                'Setting TCP TLS listener on port %d for HTTPS requests',
                8080)
        ]
        assert self.cls.reactor.mock_calls == [
            call.listenSSL(8080, site, self.cls.tls_factory)
        ]
def test_update_active_node_different(self):
    """A changed active node is logged, stored, and the poll time recorded."""
    self.cls.active_node_ip_port = 'a:b'
    with patch('%s.get_active_node' % pb) as m_get, \
            patch('%s.logger' % pbm) as m_logger:
        m_get.return_value = 'c:d'
        self.cls.update_active_node()
        assert m_get.mock_calls == [call()]
        assert m_logger.mock_calls == [
            call.warning('Active vault node changed from %s to %s',
                         'a:b', 'c:d')
        ]
    assert self.cls.active_node_ip_port == 'c:d'
    assert self.cls.last_poll_time == '2015-01-10T12:13:14'
def test_save_json(self):
    """save_json rejects empty content / bad paths and writes valid data."""
    log = Mock()
    # empty content is refused
    self.assertFalse(self.reddalert.save_json("/tmp", {}, log))
    # an unwritable (over-long) path fails and is logged via exception()
    self.assertFalse(
        self.reddalert.save_json("/tmp" * 100, {"foo": "bar"}, log))
    # a sane path with real data succeeds
    self.assertTrue(
        self.reddalert.save_json(
            "/tmp/reddalert_test.tmp", self.test_json_data, log))
    expected = [
        call.warning("Got empty JSON content, not updating status file!"),
        call.exception("Failed to write file '%s'", "/tmp" * 100),
    ]
    self.assertEqual(log.mock_calls, expected)
def test_find_usage_with_endpoint_connection_error(self):
    """find_usage must warn (not raise) when the Firehose endpoint is absent."""
    conn = Mock()
    err = EndpointConnectionError(
        endpoint_url='https://firehose.bad-region.amazonaws.com/')
    conn.list_delivery_streams.side_effect = err
    svc = _FirehoseService(21, 43, {}, None)
    svc.conn = conn
    with patch('%s.logger' % self.pbm, autospec=True) as m_logger:
        svc.find_usage()
    expected_msg = (
        'Caught exception when trying to use Firehose ('
        'perhaps the Firehose service is not available in this region?): '
        '%s')
    assert call.warning(expected_msg, err) in m_logger.mock_calls
def test_find_usage_with_endpoint_connection_error(self):
    """find_usage should log a warning on EndpointConnectionError, not raise."""
    conn = Mock()
    err = EndpointConnectionError(
        endpoint_url='https://firehose.bad-region.amazonaws.com/')
    conn.list_delivery_streams.side_effect = err
    svc = _FirehoseService(21, 43)
    svc.conn = conn
    with patch('%s.logger' % self.pbm, autospec=True) as m_logger:
        svc.find_usage()
    expected_msg = (
        'Caught exception when trying to use Firehose ('
        'perhaps the Firehose service is not available in this region?): '
        '%s')
    assert call.warning(expected_msg, err) in m_logger.mock_calls
def test_discover_sensors_dummy(self):
    """--dummy mode loads only a DummySensor and warns about it."""
    self.cls.dummy_data = True
    with patch('%s.logger' % pbm, autospec=True) as m_logger, \
            patch('%s._sensor_classes' % pb) as m_classes:
        result = self.cls.discover_sensors()
        # class discovery must be bypassed entirely in dummy mode
        assert m_classes.mock_calls == []
        assert len(result) == 1
        assert isinstance(result[0], DummySensor)
        assert result[0].host_id == 'myhostid'
        assert m_logger.mock_calls == [
            call.warning('Running with --dummy - only DummySensor() will be '
                         'loaded')
        ]
def test_exception(self):
    """An unparseable refresh status logs a warning; result is still True."""
    describe = self.mock_conn.describe_trusted_advisor_check_refresh_statuses
    status = {'checkId': 'abc123', 'status': 'none'}
    describe.return_value = {'statuses': [status]}
    with patch('%s.logger' % pbm, autospec=True) as m_logger:
        result = self.cls._can_refresh_check('abc123')
    assert result is True
    assert describe.mock_calls == [call(checkIds=['abc123'])]
    assert m_logger.mock_calls == [
        call.debug('TA Check %s refresh status: %s', 'abc123', status),
        call.warning('Could not get refresh status for TA check %s',
                     'abc123', exc_info=True)
    ]
def test_upper_level(self, logger_obj, handler_cls):
    """Entering QuietLogger(WARNING) bumps the handler above WARNING; exit restores."""
    fake_handler = Mock()
    fake_handler.configure_mock(level=logging.INFO)
    handler_cls.return_value = fake_handler
    with log_helpers.QuietLogger(logging.WARNING):
        log_helpers.logger.warning('Test')
    expected_levels = (
        call.setLevel(logging.WARNING + 1),
        call.setLevel(logging.INFO),
    )
    fake_handler.assert_has_calls(expected_levels)
    logger_obj.assert_has_calls((call.warning('Test'),))
def test_find_usage_apis_stages_now_paginated(self):
    """A get_stages response with an extra 'position' key triggers a warning."""
    conn = Mock()
    apis_paginator = Mock()
    apis_paginator.paginate.return_value = \
        result_fixtures.ApiGateway.get_rest_apis

    def se_res_paginate(restApiId=None):
        # per-API resource pages come straight from the fixture
        return result_fixtures.ApiGateway.get_resources[restApiId]

    res_paginator = Mock()
    res_paginator.paginate.side_effect = se_res_paginate

    def se_get_paginator(api_name):
        if api_name == 'get_rest_apis':
            return apis_paginator
        elif api_name == 'get_resources':
            return res_paginator

    def se_paginate_dict(*args, **kwargs):
        if args[0] == conn.get_documentation_parts:
            return result_fixtures.ApiGateway.doc_parts[kwargs['restApiId']]
        if args[0] == conn.get_authorizers:
            return result_fixtures.ApiGateway.authorizers[
                kwargs['restApiId']
            ]

    def se_get_stages(restApiId=None):
        # deep-copy so mutating the fixture does not leak across tests;
        # the injected 'position' key is what should provoke the warning
        stages = deepcopy(result_fixtures.ApiGateway.stages[restApiId])
        stages['position'] = 'foo'
        return stages

    conn.get_paginator.side_effect = se_get_paginator
    conn.get_stages.side_effect = se_get_stages
    svc = _ApigatewayService(21, 43, {}, None)
    svc.conn = conn
    with patch('%s.paginate_dict' % pbm, autospec=True) as m_pd, \
            patch('%s.logger' % pbm) as m_logger:
        m_pd.side_effect = se_paginate_dict
        svc._find_usage_apis()
    assert m_logger.mock_calls == [
        call.debug('Finding usage for APIs'),
        call.debug('Found %d APIs', 5),
        call.debug('Finding usage for per-API limits'),
        call.warning(
            'APIGateway get_stages returned more keys than present in '
            'boto3 docs: %s',
            ['item', 'position']
        )
    ]
def test_lower_level(self, logger_obj, handler_cls):
    """QuietLogger must not lower a handler level that is already higher."""
    fake_handler = Mock()
    fake_handler.configure_mock(level=logging.INFO)
    handler_cls.return_value = fake_handler
    with log_helpers.QuietLogger(logging.DEBUG):
        log_helpers.logger.warning('Test')
    # only the restore call is expected; no level bump occurred
    fake_handler.assert_has_calls((call.setLevel(logging.INFO),))
    logger_obj.assert_has_calls((
        call.debug(
            'QuietLogger requested lower level, than is already set. '
            'Not changing level'),
        call.warning('Test'),
    ))