def test_load_json(self):
    logger = Mock()
    self.assertEqual(self.reddalert.load_json('asd', logger), {})
    self.assertEqual(self.reddalert.load_json(self.test_status_file, logger), self.test_json_data)
    self.assertEqual(self.reddalert.load_json(self.test_invalid_json, logger), {})
    self.assertEqual(logger.mock_calls, [
        call.error("Failed to read file '%s'", 'asd'),
        call.error("Invalid JSON file '%s'", self.test_invalid_json)
    ])
def test_spec_with_errors_before_and_after_assertions(self):
    self.run_spec(
        [examples.spec_with_error_before_and_after_assertions],
        'spec with error before and after assertions'
    )
    self.mock.assert_has_calls([
        call.success(self.spec, 'given', 'setup'),
        call.success(self.spec, 'when', 'action'),
        call.error(self.spec, 'collect', 'result', ANY),
        call.error(self.spec, 'after', 'an exception is raised', ANY)
    ])
    self.assertIsInstance(self._extract_exception_from_call(2), KeyError)
    self.assertIsInstance(self._extract_exception_from_call(3), ValueError)
def test_find_usage_instances_key_error(self):
    mock_inst1A = Mock(spec_set=Instance)
    type(mock_inst1A).id = '1A'
    type(mock_inst1A).instance_type = 'foobar'
    type(mock_inst1A).spot_instance_request_id = None
    mock_res1 = Mock(spec_set=Reservation)
    type(mock_res1).instances = [mock_inst1A]
    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [mock_res1]
    cls = _Ec2Service(21, 43)
    cls.conn = mock_conn
    cls.limits = {'Running On-Demand t2.micro instances': Mock()}
    with patch('%s._instance_types' % self.pb, autospec=True) as mock_itypes:
        with patch('awslimitchecker.services.ec2.logger') as mock_logger:
            with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
                mock_wrapper.return_value = return_value
                mock_itypes.return_value = ['t2.micro']
                cls._instance_usage()
    assert mock_logger.mock_calls == [
        call.debug('Getting usage for on-demand instances'),
        call.error("ERROR - unknown instance type '%s'; not counting",
                   'foobar'),
    ]
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [
        call(mock_conn.get_all_reservations)
    ]
def test_find_usage_instances_key_error(self):
    mock_inst1A = Mock(spec_set=Instance)
    type(mock_inst1A).id = '1A'
    type(mock_inst1A).instance_type = 'foobar'
    type(mock_inst1A).spot_instance_request_id = None
    mock_res1 = Mock(spec_set=Reservation)
    type(mock_res1).instances = [mock_inst1A]
    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [mock_res1]
    cls = _Ec2Service(21, 43)
    cls.conn = mock_conn
    cls.limits = {'Running On-Demand t2.micro instances': Mock()}
    with patch(
            '%s._instance_types' % self.pb, autospec=True) as mock_itypes:
        with patch('awslimitchecker.services.ec2.logger') as mock_logger:
            with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
                mock_wrapper.return_value = return_value
                mock_itypes.return_value = ['t2.micro']
                cls._instance_usage()
    assert mock_logger.mock_calls == [
        call.debug('Getting usage for on-demand instances'),
        call.error("ERROR - unknown instance type '%s'; not counting",
                   'foobar'),
    ]
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [call(mock_conn.get_all_reservations)]
def test_read_and_send_bad_status_code(self):
    s1 = Mock(spec_set=BaseSensor)
    s1.read.return_value = {
        'sensor1': {'data': 's1data'},
        'sensor2': {'data': 's2data'},
    }
    self.cls.sensors = [s1]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.requests.post' % pbm, autospec=True) as mock_post:
            mock_post.return_value = Mock(status_code=404, text='foo')
            self.cls.read_and_send()
    url = 'http://foo.bar.baz:1234/v1/sensors/update'
    data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {'data': 's1data'},
            'sensor2': {'data': 's2data'}
        }
    }
    assert mock_post.mock_calls == [
        call(url, json=data)
    ]
    assert mock_logger.mock_calls == [
        call.debug('Reading sensors'),
        call.debug('POSTing sensor data to %s: %s', url, data),
        call.error('Error POSTing sensor data; got status code %s: %s',
                   404, 'foo')
    ]
def test_fail_always(self):
    self.config['splunk_max_attempts'] = 3
    self.config['splunk_hec_max_length'] = None
    with patch('%s.sleep' % pbm) as mock_sleep:
        with patch('%s.uniform' % pbm) as mock_uniform:
            with patch('%s._send_splunk' % pb) as mock_send:
                mock_uniform.return_value = 1.2
                mock_send.side_effect = RuntimeError('foo')
                res = self.cls._try_send({'foo': 'bar'})
    assert res is False
    assert mock_sleep.mock_calls == [call(1.2), call(1.2), call(1.2)]
    assert mock_uniform.mock_calls == [call(1, 4), call(1, 4), call(1, 4)]
    assert mock_send.mock_calls == [
        call('{"foo": "bar"}'),
        call('{"foo": "bar"}'),
        call('{"foo": "bar"}')
    ]
    assert self.mock_logger.mock_calls == [
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.warning(
            'Caught exception sending to Splunk; retry in %s seconds', 1.2),
        call.error('ERROR - Could not POST to Splunk after %d tries.', 3)
    ]
def test_find_usage_nat_gateways(self):
    subnets = result_fixtures.VPC.test_find_usage_nat_gateways_subnets
    response = result_fixtures.VPC.test_find_usage_nat_gateways
    mock_conn = Mock()
    mock_conn.describe_nat_gateways.return_value = response
    with patch('%s.logger' % self.pbm) as mock_logger:
        cls = _VpcService(21, 43)
        cls.conn = mock_conn
        cls._find_usage_nat_gateways(subnets)
    assert len(cls.limits['NAT Gateways per AZ'].get_current_usage()) == 2
    az2 = cls.limits['NAT Gateways per AZ'].get_current_usage()[0]
    assert az2.get_value() == 3
    assert az2.resource_id == 'az2'
    az3 = cls.limits['NAT Gateways per AZ'].get_current_usage()[1]
    assert az3.get_value() == 1
    assert az3.resource_id == 'az3'
    assert mock_conn.mock_calls == [
        call.describe_nat_gateways(),
    ]
    assert mock_logger.mock_calls == [
        call.error(
            'ERROR: NAT Gateway %s in SubnetId %s, but SubnetId not '
            'found in subnet_to_az; Gateway cannot be counted!',
            'nat-124', 'subnet4'
        ),
        call.debug(
            'Skipping NAT Gateway %s in state: %s', 'nat-125', 'deleted'
        ),
        call.debug(
            'Skipping NAT Gateway %s in state: %s', 'nat-127', 'failed'
        )
    ]
def test_send_bad_status(self):
    self.config['splunk_hec_url'] = 'https://splunk.url/foo'
    self.config['splunk_hec_token'] = 'stoken'
    m_resp = Mock(spec_set=requests.models.Response)
    type(m_resp).status_code = 403
    type(m_resp).text = '{"text": "Success"}'
    type(m_resp).headers = {'H1': 'V1'}
    m_resp.json.return_value = {'text': 'Success'}
    with patch('%s.requests' % pbm, autospec=True) as mock_req:
        mock_req.post.return_value = m_resp
        with pytest.raises(RuntimeError):
            self.cls._send_splunk('{"foo": "bar"}')
    assert mock_req.mock_calls == [
        call.post('https://splunk.url/foo',
                  headers={'Authorization': 'Splunk stoken'},
                  data='{"foo": "bar"}')
    ]
    assert self.mock_logger.mock_calls == [
        call.debug('Send to Splunk (%s): %s', 'https://splunk.url/foo',
                   '{"foo": "bar"}'),
        call.debug('Splunk POST got response code %s HEADERS=%s BODY: %s',
                   403, {'H1': 'V1'}, '{"text": "Success"}'),
        call.error(
            'Splunk POST returned non-20x response: %s HEADERS=%s BODY: %s',
            403, {'H1': 'V1'}, '{"text": "Success"}')
    ]
def test_send_non_success_no_json(self):
    self.config['splunk_hec_url'] = 'https://splunk.url/foo'
    self.config['splunk_hec_token'] = 'stoken'

    def se_exc(*args, **kwargs):
        raise Exception('foo')

    m_resp = Mock(spec_set=requests.models.Response)
    type(m_resp).status_code = 200
    type(m_resp).text = '{"text": "Failure"}'
    type(m_resp).headers = {'H1': 'V1'}
    m_resp.json.side_effect = se_exc
    with patch('%s.requests' % pbm, autospec=True) as mock_req:
        mock_req.post.return_value = m_resp
        with pytest.raises(RuntimeError):
            self.cls._send_splunk('{"foo": "bar"}')
    assert mock_req.mock_calls == [
        call.post('https://splunk.url/foo',
                  headers={'Authorization': 'Splunk stoken'},
                  data='{"foo": "bar"}'),
        call.post().json()
    ]
    assert self.mock_logger.mock_calls == [
        call.debug('Send to Splunk (%s): %s', 'https://splunk.url/foo',
                   '{"foo": "bar"}'),
        call.debug('Splunk POST got response code %s HEADERS=%s BODY: %s',
                   200, {'H1': 'V1'}, '{"text": "Failure"}'),
        call.error('Splunk POST returned non-success response: %s',
                   {'text': '{"text": "Failure"}'})
    ]
def test_find_usage_nat_gateways_exception(self):
    subnets = result_fixtures.VPC.test_find_usage_nat_gateways_subnets

    def se_exc(*args, **kwargs):
        raise ClientError({'Error': {}}, 'opname')

    mock_conn = Mock()
    mock_conn.describe_nat_gateways.side_effect = se_exc
    cls = _VpcService(21, 43, {}, None)
    cls._current_account_id = '0123456789'
    cls.conn = mock_conn
    with patch('%s.logger' % self.pbm, autospec=True) as mock_logger:
        cls._find_usage_nat_gateways(subnets)
    assert len(cls.limits['NAT Gateways per AZ'].get_current_usage()) == 0
    assert mock_conn.mock_calls == [
        call.describe_nat_gateways(),
    ]
    assert mock_logger.mock_calls == [
        call.error(
            'Caught exception when trying to list NAT Gateways; '
            'perhaps NAT service does not exist in this region?',
            exc_info=1)
    ]
def test_find_usage_spot_fleets_paginated(self):
    data = deepcopy(fixtures.test_find_usage_spot_fleets)
    data['NextToken'] = 'string'
    mock_conn = Mock()
    mock_client_conn = Mock()
    mock_client_conn.describe_spot_fleet_requests.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.conn = mock_client_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_spot_fleets()
    assert mock_conn.mock_calls == []
    assert mock_client_conn.mock_calls == [
        call.describe_spot_fleet_requests()
    ]
    total = cls.limits['Max active spot fleets per '
                       'region'].get_current_usage()
    assert len(total) == 1
    assert total[0].get_value() == 2
    totalcap = cls.limits['Max target capacity for all spot fleets '
                          'in region'].get_current_usage()
    assert len(totalcap) == 1
    assert totalcap[0].get_value() == 44
    cap_per_fleet = cls.limits['Max target capacity per spot '
                               'fleet'].get_current_usage()
    assert len(cap_per_fleet) == 2
    assert cap_per_fleet[0].get_value() == 11
    assert cap_per_fleet[0].resource_id == 'req2'
    assert cap_per_fleet[1].get_value() == 33
    assert cap_per_fleet[1].resource_id == 'req4'
    launch_specs = cls.limits['Max launch specifications '
                              'per spot fleet'].get_current_usage()
    assert len(launch_specs) == 2
    assert launch_specs[0].get_value() == 3
    assert launch_specs[0].resource_id == 'req2'
    assert launch_specs[1].get_value() == 1
    assert launch_specs[1].resource_id == 'req4'
    assert mock_logger.mock_calls == [
        call.debug('Getting spot fleet request usage'),
        call.error('Error: describe_spot_fleet_requests() response '
                   'includes pagination token, but pagination not '
                   'configured in awslimitchecker.'),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req1', 'failed'),
        call.debug('Active fleet %s: target capacity=%s, %d launch specs',
                   'req2', 11, 3),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req3', 'modifying'),
        call.debug('Active fleet %s: target capacity=%s, %d launch specs',
                   'req4', 33, 1),
        call.debug(
            'Total active spot fleets: %d; total target capacity '
            'for all spot fleets: %d', 2, 44)
    ]
def test__exc_to_http_exc__HTTPException_server_error(self, LOGGER):
    exc = HTTPServerError()
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIs(http_exc, exc)
    self.assertEqual(http_exc.code, 500)
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, 500, exc_info=True),
    ])
def test_HTTPException_server_error(self, LOGGER):
    exc = HTTPServerError()
    http_exc = exc_to_http_exc(exc)
    self.assertIs(http_exc, exc)
    self.assertEqual(http_exc.code, 500)
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, 500, exc_info=True),
    ])
def test_find_usage_spot_fleets_paginated(self):
    data = deepcopy(fixtures.test_find_usage_spot_fleets)
    data['NextToken'] = 'string'
    mock_conn = Mock()
    mock_client_conn = Mock()
    mock_client_conn.describe_spot_fleet_requests.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.conn = mock_client_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_spot_fleets()
    assert mock_conn.mock_calls == []
    assert mock_client_conn.mock_calls == [
        call.describe_spot_fleet_requests()
    ]
    total = cls.limits['Max active spot fleets per '
                       'region'].get_current_usage()
    assert len(total) == 1
    assert total[0].get_value() == 2
    totalcap = cls.limits['Max target capacity for all spot fleets '
                          'in region'].get_current_usage()
    assert len(totalcap) == 1
    assert totalcap[0].get_value() == 44
    cap_per_fleet = cls.limits['Max target capacity per spot '
                               'fleet'].get_current_usage()
    assert len(cap_per_fleet) == 2
    assert cap_per_fleet[0].get_value() == 11
    assert cap_per_fleet[0].resource_id == 'req2'
    assert cap_per_fleet[1].get_value() == 33
    assert cap_per_fleet[1].resource_id == 'req4'
    launch_specs = cls.limits['Max launch specifications '
                              'per spot fleet'].get_current_usage()
    assert len(launch_specs) == 2
    assert launch_specs[0].get_value() == 3
    assert launch_specs[0].resource_id == 'req2'
    assert launch_specs[1].get_value() == 1
    assert launch_specs[1].resource_id == 'req4'
    assert mock_logger.mock_calls == [
        call.debug('Getting spot fleet request usage'),
        call.error('Error: describe_spot_fleet_requests() response '
                   'includes pagination token, but pagination not '
                   'configured in awslimitchecker.'),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req1', 'failed'),
        call.debug('Active fleet %s: target capacity=%s, %d launch specs',
                   'req2', 11, 3),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req3', 'modifying'),
        call.debug('Active fleet %s: target capacity=%s, %d launch specs',
                   'req4', 33, 1),
        call.debug('Total active spot fleets: %d; total target capacity '
                   'for all spot fleets: %d', 2, 44)
    ]
def test__exc_to_http_exc__other_exception(self, LOGGER):
    exc = ZeroDivisionError
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertIs(http_exc.detail, None)   # no detail
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, exc_info=True),
    ])
def test__exc_to_http_exc__other_DataAPIError(self, LOGGER):
    exc = DataAPIError(public_message='FOO')   # custom public message
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertEqual(http_exc.detail, 'FOO')   # detail == custom public message
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test_not_root_user(self):
    plugininst = hvshsdist.http_vs_https_dist()
    plugininst._has_permission = MagicMock(return_value=False)
    with patch('w3af.plugins.infrastructure.http_vs_https_dist.om.out') as om_mock:
        plugininst.discover(None, None)
        ecall = call.error(hvshsdist.PERM_ERROR_MSG)
        self.assertIn(ecall, om_mock.mock_calls)
def test_other_DataAPIError_2(self, LOGGER):
    exc = DataAPIError()   # no specific public message
    http_exc = exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertIs(http_exc.detail, None)   # *no* detail
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test__exc_to_http_exc__other_DataAPIError_2(self, LOGGER):
    exc = DataAPIError()   # no specific public message
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertIs(http_exc.detail, None)   # *no* detail
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test_other_exception(self, LOGGER):
    exc = ZeroDivisionError
    http_exc = exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertIs(http_exc.detail, None)   # no detail
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, exc_info=True),
    ])
def test_save_json(self):
    logger = Mock()
    self.assertFalse(self.reddalert.save_json('/tmp' * 100, {}, logger))
    self.assertTrue(self.reddalert.save_json('/tmp/reddalert_test.tmp', self.test_json_data, logger))
    self.assertEqual(logger.mock_calls, [
        call.error("Failed to write file '%s'", '/tmp' * 100)
    ])
def test__exc_to_http_exc__ResultCleaningError_2(self, LOGGER):
    exc = ResultCleaningError()   # no specific public message
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertIs(http_exc.detail, None)   # *no* detail
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test_not_root_user(self):
    plugininst = hvshsdist.http_vs_https_dist()
    plugininst._has_permission = MagicMock(return_value=False)
    with patch('w3af.plugins.infrastructure.http_vs_https_dist.om.out') as om_mock:
        plugininst.discover(None)
        ecall = call.error(hvshsdist.PERM_ERROR_MSG)
        self.assertIn(ecall, om_mock.mock_calls)
def test_other_DataAPIError(self, LOGGER):
    exc = DataAPIError(public_message='FOO')   # custom public message
    http_exc = exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.code, 500)
    self.assertEqual(http_exc.detail, 'FOO')   # detail == custom public message
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test_spec_with_assertion_error(self):
    self.run_spec(
        [examples.spec_with_assertion_error],
        'spec with assertion error'
    )
    self.mock.assert_has_calls([
        call.error(self.spec, 'then', 'it should raise an error', ANY),
        call.success(self.spec, 'then', 'it should run other assertions'),
        call.success(self.spec, 'after', 'cleanup')
    ])
    self.assertIsInstance(self._extract_exception_from_call(0), KeyError)
def test_spec_with_error_before_assertions_with_no_cleanup(self):
    self.run_spec(
        [examples.spec_with_error_before_assertions_without_cleanup],
        'spec with error before assertions without cleanup'
    )
    self.assertSequenceEqual(self.mock.mock_calls, [
        call.error(self.spec, 'when', 'an exception is raised', ANY),
        call.spec_complete(),
    ])
    self.assertIsInstance(self._extract_exception_from_call(0), KeyError)
def test__exc_to_http_exc__ResultCleaningError(self, LOGGER):
    exc = ResultCleaningError(
        public_message='FOO')   # custom public message
    http_exc = ConfigHelper.exc_to_http_exc(exc)
    self.assertIsInstance(http_exc, HTTPServerError)
    self.assertEqual(http_exc.detail, 'FOO')   # detail == custom public message
    self.assertEqual(http_exc.code, 500)
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, exc, ANY, exc_info=True),
    ])
def test_save_json(self):
    logger = Mock()
    self.assertFalse(self.reddalert.save_json('/tmp', {}, logger))
    self.assertFalse(self.reddalert.save_json('/tmp' * 100, {'foo': 'bar'}, logger))
    self.assertTrue(self.reddalert.save_json('/tmp/reddalert_test.tmp', self.test_json_data, logger))
    self.assertEqual(logger.mock_calls, [
        call.warning('Got empty JSON content, not updating status file!'),
        call.error("Failed to write file '%s'", '/tmp' * 100)
    ])
def test_stop_on_must_stop_exception(self): """ Verify that the ScanMustStopException stops the scan. """ self.exception_plugin.exception_to_raise = ScanMustStopException with patch('w3af.core.controllers.w3afCore.om.out') as om_mock: self.w3afcore.start() error = "\n**IMPORTANT** The following error was detected by w3af"\ " and couldn't be resolved:\nTest exception.\n" self.assertIn(call.error(error), om_mock.mock_calls)
def test_stop_on_must_stop_exception(self): """ Verify that the ScanMustStopException stops the scan. """ self.exception_plugin.exception_to_raise = ScanMustStopException with patch('w3af.core.controllers.w3afCore.om.out') as om_mock: self.w3afcore.start() error = ('The following error was detected and could not be' ' resolved:\nTest exception.\n') self.assertIn(call.error(error), om_mock.mock_calls)
def test_spec_that_fails_initialization(self):
    self.run_spec(
        [examples.spec_that_fails_initialization],
        'spec that fails initialization'
    )
    self.mock.assert_has_calls([call.error(self.spec, ANY, ANY, ANY)])
    exception = self._extract_exception_from_call(0)
    self.assertIsInstance(exception, SpecInitializationError)
    self.assertEqual(
        'The spec (spec that fails initialization) could '
        'not be initialized (error in constructor).',
        exception.message)
def test_stop_on_must_stop_exception(self):
    '''
    Verify that the w3afMustStopException stops the scan.
    '''
    self.exception_plugin.exception_to_raise = w3afMustStopException
    with patch('core.controllers.w3afCore.om.out') as om_mock:
        self.w3afcore.start()
        error = "\n**IMPORTANT** The following error was detected by w3af"\
                " and couldn't be resolved:\nTest exception.\n"
        self.assertIn(call.error(error), om_mock.mock_calls)
def test_find_usage_ebs(self):
    response = result_fixtures.EBS.test_find_usage_ebs
    mock_conn = Mock()
    cls = _EbsService(21, 43)
    cls.conn = mock_conn
    with patch('awslimitchecker.services.ebs.logger') as mock_logger:
        with patch('%s.paginate_dict' % self.pbm) as mock_paginate:
            mock_paginate.return_value = response
            cls._find_usage_ebs()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EBS volumes"),
        call.error(
            "ERROR - unknown volume type '%s' for volume "
            "%s; not counting", 'othertype', 'vol-7')
    ]
    assert len(cls.limits['Provisioned IOPS'].get_current_usage()) == 1
    assert cls.limits[
        'Provisioned IOPS'].get_current_usage()[0].get_value() == 1000
    assert len(cls.limits['Provisioned IOPS (SSD) storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Provisioned IOPS (SSD) storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 500
    assert len(cls.limits['General Purpose (SSD) volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['General Purpose (SSD) volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 45
    assert len(cls.limits['Magnetic volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Magnetic volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 508
    assert len(cls.limits['Throughput Optimized (HDD) volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Throughput Optimized (HDD) volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 500
    assert len(cls.limits['Cold (HDD) volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Cold (HDD) volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 1000
    assert len(cls.limits['Active volumes'].get_current_usage()) == 1
    assert cls.limits[
        'Active volumes'].get_current_usage()[0].get_value() == 9
    assert mock_conn.mock_calls == []
    assert mock_paginate.mock_calls == [
        call(
            mock_conn.describe_volumes,
            alc_marker_path=['NextToken'],
            alc_data_path=['Volumes'],
            alc_marker_param='NextToken'
        )
    ]
def test_find_usage_with_endpoint_connection_error(self):
    mock_conn = Mock()
    client_error = EndpointConnectionError(
        endpoint_url='https://firehose.bad-region.amazonaws.com/')
    mock_conn.list_delivery_streams.side_effect = client_error
    cls = _FirehoseService(21, 43)
    cls.conn = mock_conn
    with patch('%s.logger' % self.pbm, autospec=True) as mock_logger:
        cls.find_usage()
    error_msg = (
        'Caught exception when trying to use Firehose; '
        'perhaps the Firehose service is not available in this region?')
    assert call.error(error_msg, exc_info=1) in mock_logger.mock_calls
def test_spec_with_error_before_assertions(self):
    self.run_spec(
        [examples.spec_with_error_before_assertions],
        'spec with error before assertions'
    )
    self.mock.assert_has_calls([
        call.error(self.spec, 'given', 'an exception is raised', ANY),
        call.success(self.spec, 'after', 'should be executed to clean up')
    ])
    self.assertNotIn(call.success(ANY, 'when', ANY), self.calls)
    self.assertNotIn(call.success(ANY, 'collect', ANY), self.calls)
    self.assertNotIn(call.success(ANY, 'then', ANY), self.calls)
    self.assertIsInstance(self._extract_exception_from_call(0), KeyError)
def test_spec_with_error_after_assertions(self):
    self.run_spec(
        [examples.spec_with_error_after_assertions],
        'spec with error after assertions'
    )
    self.mock.assert_has_calls([
        call.success(self.spec, 'given', 'setup'),
        call.success(self.spec, 'when', 'action'),
        call.success(self.spec, 'collect', 'result'),
        call.success(self.spec, 'then', 'something'),
        call.success(self.spec, 'then', 'something else'),
        call.error(self.spec, 'after', 'an exception is raised', ANY)
    ])
    self.assertIsInstance(self._extract_exception_from_call(-2), KeyError)
def test_delete_fail(self):
    conn = Mock()
    conn.delete_message.return_value = {
        'ResponseMetadata': {'HTTPStatusCode': 503}
    }
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        self.cls._delete_msg(conn, 'qurl', 'rh')
    assert conn.mock_calls == [
        call.delete_message(QueueUrl='qurl', ReceiptHandle='rh')
    ]
    assert mock_logger.mock_calls == [
        call.error('Error: message with receipt handle %s in queue %s '
                   'was not successfully deleted (HTTP %s)',
                   'rh', 'qurl', 503)
    ]
def test_spec_with_no_assertions(self):
    self.run_spec(
        [examples.spec_without_assertions],
        'spec without assertions'
    )
    self.assertEqual(
        self.mock.mock_calls[0],
        call.error(self.spec, 'collect steps', 'not implemented', ANY)
    )
    exception = self._extract_exception_from_call(0)
    self.assertIsInstance(exception, SpecInitializationError)
    self.assertEqual(
        'No assertions ("@then" decorators) found with '
        'the spec (spec without assertions).',
        exception.message)
def test_excessive_use_of_spec_step_methods(self):
    self.run_spec(
        [examples.spec_with_multiple_givens,
         examples.spec_with_multiple_whens,
         examples.spec_with_multiple_collects,
         examples.spec_with_multiple_afters],
        ['spec with multiple givens',
         'spec with multiple whens',
         'spec with multiple collects',
         'spec with multiple afters']
    )
    self.mock.assert_has_calls([
        call.error(self.spec[0], 'collect steps', 'extra steps', ANY),
        call.spec_complete(),
        call.error(self.spec[1], 'collect steps', 'extra steps', ANY),
        call.spec_complete(),
        call.error(self.spec[2], 'collect steps', 'extra steps', ANY),
        call.spec_complete(),
        call.error(self.spec[3], 'collect steps', 'extra steps', ANY),
        call.spec_complete(),
    ])
    exceptions = [self._extract_exception_from_call(x)
                  for x in [0, 2, 4, 6]]
    self.assertTrue(
        all(isinstance(e, SpecInitializationError) for e in exceptions))
    expected_messages = [
        "The spec (spec with multiple givens) "
        "has extra steps (['given']).",
        "The spec (spec with multiple whens) "
        "has extra steps (['when']).",
        "The spec (spec with multiple collects) "
        "has extra steps (['collect']).",
        "The spec (spec with multiple afters) "
        "has extra steps (['after']).",
    ]
    self.assertSequenceEqual(
        expected_messages, [e.message for e in exceptions])
def test_load_settings_no_settings(self):
    self.cls.FOO = 0
    env = {'foo': 'bar'}
    with patch('%s.os.environ' % pbm, env):
        with patch('%s._load_module' % pb) as mock_load:
            with patch('%s.logger' % pbm) as mock_logger:
                mock_load.return_value = None
                self.cls._load_settings()
    assert mock_logger.mock_calls == [
        call.debug("Loading settings from %s", 'piface_webhooks.settings'),
        call.error(
            'Settings module %s could not be loaded; using default '
            'settings!', 'piface_webhooks.settings')
    ]
    assert mock_load.mock_calls == [call('piface_webhooks.settings')]
    assert self.cls.FOO == 0
def test_timeout(self):
    self.cls.refresh_timeout = 45
    check_dt = datetime(2016, 12, 16, hour=10, minute=30, second=12,
                        tzinfo=utc)
    now_dts = [
        datetime(2016, 12, 16, hour=11, minute=30, second=0, tzinfo=utc),
        datetime(2016, 12, 16, hour=11, minute=30, second=0, tzinfo=utc),
        datetime(2016, 12, 16, hour=11, minute=30, second=30, tzinfo=utc),
        datetime(2016, 12, 16, hour=11, minute=31, second=0, tzinfo=utc),
    ]
    status = {'statuses': [{'status': 'processing'}]}
    m_s = self.mock_conn.describe_trusted_advisor_check_refresh_statuses
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.sleep' % pbm, autospec=True) as mock_sleep:
            with patch('%s._get_check_result' % pb, autospec=True) as gcr:
                with patch('%s.datetime_now' % pbm) as mock_dt_now:
                    mock_dt_now.side_effect = now_dts
                    m_s.return_value = status
                    gcr.return_value = ({'foo': 'bar'}, check_dt)
                    res = self.cls._poll_for_refresh('abc123')
    assert res == {'foo': 'bar'}
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123'])
    ]
    assert gcr.mock_calls == [call(self.cls, 'abc123')]
    assert mock_sleep.mock_calls == [call(30), call(30)]
    assert mock_dt_now.mock_calls == [call(), call(), call(), call()]
    assert mock_logger.mock_calls == [
        call.warning('Polling for TA check %s refresh...', 'abc123'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'processing'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'processing'),
        call.error('Timed out waiting for TA Check refresh; status=%s',
                   'processing'),
        call.info('Done polling for check refresh'),
        call.debug('Check shows last refresh time of: %s', check_dt)
    ]
def test_handle_event(self):
    def se_enqueue(conn, qname, msg):
        if qname == 'q1':
            return 'msgid1'
        if qname == 'q2':
            raise Exception('foo')
        if qname == 'q3':
            return 'msgid3'
        return 'othermsgid'

    with patch.multiple(
        pbm,
        autospec=True,
        logger=DEFAULT,
        queues_for_endpoint=DEFAULT,
        msg_body_for_event=DEFAULT,
        boto3=DEFAULT,
        try_enqueue=DEFAULT
    ) as mocks:
        mocks['queues_for_endpoint'].return_value = ['q1', 'q2', 'q3']
        mocks['msg_body_for_event'].return_value = 'mybody'
        mocks['try_enqueue'].side_effect = se_enqueue
        res = handle_event(self.mock_event, self.mock_context)
    assert res == {
        'status': 'partial',
        'message': 'enqueued 2 messages; 1 failed',
        'SQSMessageIds': ['msgid1', 'msgid3']
    }
    assert mocks['queues_for_endpoint'].mock_calls == [
        call(self.mock_event)
    ]
    assert mocks['msg_body_for_event'].mock_calls == [
        call(self.mock_event, self.mock_context)
    ]
    assert mocks['try_enqueue'].mock_calls == [
        call(mocks['boto3'].client.return_value, 'q1', 'mybody'),
        call(mocks['boto3'].client.return_value, 'q2', 'mybody'),
        call(mocks['boto3'].client.return_value, 'q3', 'mybody'),
    ]
    assert mocks['boto3'].mock_calls == [
        call.client('sqs')
    ]
    assert mocks['logger'].mock_calls == [
        call.error('Failed enqueueing message in %s:', 'q2', exc_info=1)
    ]
def test_load_settings_no_settings(self): self.cls.FOO = 0 env = {"foo": "bar"} with patch("%s.os.environ" % pbm, env): with patch("%s._load_module" % pb) as mock_load: with patch("%s.logger" % pbm) as mock_logger: mock_load.return_value = None self.cls._load_settings() assert mock_logger.mock_calls == [ call.debug("Loading settings from %s", "piface_webhooks.settings"), call.error( "Settings module %s could not be loaded; using default " "settings!", "piface_webhooks.settings" ), ] assert mock_load.mock_calls == [call("piface_webhooks.settings")] assert self.cls.FOO == 0
def test_validate_version_no_re_match(self):
    def se_run(*args, **kwargs):
        if args[1] == 'version':
            return 'foo bar'

    # validate is called in __init__; we can't easily patch and re-call
    with patch('%s._run_tf' % pb, autospec=True) as mock_run:
        mock_run.side_effect = se_run
        with patch('%s.logger' % pbm, autospec=True) as mock_logger:
            cls = TerraformRunner(self.mock_config(), 'terraform-bin')
    assert mock_run.mock_calls == [
        call(cls, 'version')
    ]
    assert mock_logger.mock_calls == [
        call.error('Unable to determine terraform version; will not '
                   'validate config. Note that this may cause problems '
                   'when using older Terraform versions.')
    ]
def test_instance_usage_key_error(self):
    mock_conn = Mock()
    data = fixtures.test_instance_usage_key_error
    mock_conn.instances.all.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.limits = {'Running On-Demand t2.micro instances': Mock()}
    with patch('%s._instance_types' % self.pb, autospec=True) as mock_itypes:
        with patch('awslimitchecker.services.ec2.logger') as mock_logger:
            mock_itypes.return_value = ['t2.micro']
            cls._instance_usage()
    assert mock_logger.mock_calls == [
        call.debug('Getting usage for on-demand instances'),
        call.error("ERROR - unknown instance type '%s'; not counting",
                   'foobar'),
    ]
    assert mock_conn.mock_calls == [call.instances.all()]
def test_skipping_ResultCleaningError_if_flag_is_false(self, LOGGER):
    self.cleaned_list[1] = ResultCleaningError
    self.cls.break_on_result_cleaning_error = False
    self._do_call()
    self.assertEqual(LOGGER.mock_calls, [
        call.error(ANY, ANY),
    ])
    self.assertEqual(self.adjust_exc.call_count, 0)
    self.cls.get_clean_result_dict_kwargs.assert_called_once_with()
    self.cls.call_api_method.assert_called_once_with(sen.api_method)
    self.assertEqual(self.data_spec.clean_result_dict.mock_calls, [
        call(sen.result_dict_1, kwarg=sen.kwarg),
        call(sen.result_dict_2, kwarg=sen.kwarg),
        call(sen.result_dict_3, kwarg=sen.kwarg),
    ])
    self.assertEqual(self.results, [
        sen.cleaned_result_dict_1,
        sen.cleaned_result_dict_3,
    ])
def test_webhook2lambda2sqs_handler_exception(self):
    def se_exc(*args):
        raise Exception('foo')

    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.handle_event' % pbm, autospec=True) as mock_handle:
            mock_handle.side_effect = se_exc
            with patch('%s.endpoints' % pbm, self.endpoints):
                with pytest.raises(Exception) as excinfo:
                    webhook2lambda2sqs_handler(self.mock_event,
                                               self.mock_context)
    assert exc_msg(excinfo.value) == 'foo'
    assert mock_handle.mock_calls == [
        call(self.mock_event, self.mock_context)
    ]
    assert mock_logger.mock_calls == [
        call.error('Error handling event; event=%s context=%s',
                   self.mock_event, vars(self.mock_context), exc_info=1)
    ]
def test_payload_too_long(self):
    self.config['splunk_max_attempts'] = 3
    self.config['splunk_hec_max_length'] = 3000
    p = {}
    for i in range(1, 2000):
        p['%d' % i] = i
    j = json.dumps(p)
    with patch('%s.sleep' % pbm) as mock_sleep:
        with patch('%s.uniform' % pbm) as mock_uniform:
            with patch('%s._send_splunk' % pb) as mock_send:
                mock_uniform.return_value = 1.2
                self.cls._try_send(p)
    assert mock_sleep.mock_calls == []
    assert mock_uniform.mock_calls == []
    assert mock_send.mock_calls == [call(j)]
    assert self.mock_logger.mock_calls == [
        call.error(
            'ERROR: Sending %d characters to Splunk HEC; line length '
            'limit is %d characters. Data will be truncated: %s',
            25772, 3000, j)
    ]
def test_status_mongo_fail(self):
    def se_exc(**kwargs):
        raise Exception()

    mock_req = Mock(spec_set=Request)
    mock_headers = Mock()
    type(mock_req).responseHeaders = mock_headers
    mock_coll = Mock(spec_set=Collection)
    mock_coll.find.side_effect = se_exc
    mock_dbconn = Mock()
    cls = TestClass(Mock(), Mock(), mock_dbconn, [])
    cls.dbconn = mock_dbconn
    with patch('%s.get_collection' % pbm, autospec=True) as mock_get_coll:
        with patch('%s.returnValue' % pbm) as mock_retval:
            with patch('%s.logger' % pbm, autospec=True) as mock_logger:
                mock_get_coll.return_value = mock_coll
                cls.status(cls, mock_req)
    expected = json.dumps(
        {
            'status': False,
            'dependencies': {
                'mongodb': False
            }
        },
        sort_keys=True)
    assert mock_headers.mock_calls == [
        call.addRawHeader(b"content-type", b"application/json")
    ]
    assert mock_coll.mock_calls == [call.find(limit=1)]
    assert mock_req.mock_calls == [call.setResponseCode(503)]
    assert mock_get_coll.mock_calls == [call(mock_dbconn, COLL_SENSORS)]
    # need to mock out get_collection and the find method on its result;
    # also mock the request object and assert on calls
    assert mock_retval.mock_calls == [call(expected)]
    assert mock_logger.mock_calls == [
        call.error('DB connection test failed', exc_info=1)
    ]
def test_read_and_send_bad_status_code(self):
    s1 = Mock(spec_set=BaseSensor)
    s1.read.return_value = {
        'sensor1': {
            'data': 's1data'
        },
        'sensor2': {
            'data': 's2data'
        },
    }
    self.cls.sensors = [s1]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.requests.post' % pbm, autospec=True) as mock_post:
            mock_post.return_value = Mock(status_code=404, text='foo')
            self.cls.read_and_send()
    url = 'http://foo.bar.baz:1234/v1/sensors/update'
    data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {
                'data': 's1data'
            },
            'sensor2': {
                'data': 's2data'
            }
        }
    }
    assert mock_post.mock_calls == [call(url, json=data)]
    assert mock_logger.mock_calls == [
        call.debug('Reading sensors'),
        call.debug('POSTing sensor data to %s: %s', url, data),
        call.error('Error POSTing sensor data; got status code %s: %s',
                   404, 'foo')
    ]
def test_send_exception(self):
    self.config['splunk_hec_url'] = 'https://splunk.url/foo'
    self.config['splunk_hec_token'] = 'stoken'

    def se_post(*args, **kwargs):
        raise Exception('foo')

    with patch('%s.requests' % pbm, autospec=True) as mock_req:
        mock_req.post.side_effect = se_post
        with pytest.raises(Exception):
            self.cls._send_splunk('{"foo": "bar"}')
    assert mock_req.mock_calls == [
        call.post('https://splunk.url/foo',
                  headers={'Authorization': 'Splunk stoken'},
                  data='{"foo": "bar"}')
    ]
    assert self.mock_logger.mock_calls == [
        call.debug('Send to Splunk (%s): %s', 'https://splunk.url/foo',
                   '{"foo": "bar"}'),
        call.error('Exception during Splunk POST to %s of %s',
                   'https://splunk.url/foo', '{"foo": "bar"}',
                   exc_info=True)
    ]
def test_find_usage_ebs(self):
    # 500G magnetic
    mock_vol1 = Mock(spec_set=Volume)
    type(mock_vol1).id = 'vol-1'
    type(mock_vol1).type = 'standard'  # magnetic
    type(mock_vol1).size = 500
    type(mock_vol1).iops = None

    # 8G magnetic
    mock_vol2 = Mock(spec_set=Volume)
    type(mock_vol2).id = 'vol-2'
    type(mock_vol2).type = 'standard'  # magnetic
    type(mock_vol2).size = 8
    type(mock_vol2).iops = None

    # 15G general purpose SSD, 45 IOPS
    mock_vol3 = Mock(spec_set=Volume)
    type(mock_vol3).id = 'vol-3'
    type(mock_vol3).type = 'gp2'
    type(mock_vol3).size = 15
    type(mock_vol3).iops = 45

    # 30G general purpose SSD, 90 IOPS
    mock_vol4 = Mock(spec_set=Volume)
    type(mock_vol4).id = 'vol-4'
    type(mock_vol4).type = 'gp2'
    type(mock_vol4).size = 30
    type(mock_vol4).iops = 90

    # 400G PIOPS, 700 IOPS
    mock_vol5 = Mock(spec_set=Volume)
    type(mock_vol5).id = 'vol-5'
    type(mock_vol5).type = 'io1'
    type(mock_vol5).size = 400
    type(mock_vol5).iops = 700

    # 100G PIOPS, 300 IOPS
    mock_vol6 = Mock(spec_set=Volume)
    type(mock_vol6).id = 'vol-6'
    type(mock_vol6).type = 'io1'
    type(mock_vol6).size = 100
    type(mock_vol6).iops = 300

    mock_vol7 = Mock(spec_set=Volume)
    type(mock_vol7).id = 'vol-7'
    type(mock_vol7).type = 'othertype'

    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [
        mock_vol1,
        mock_vol2,
        mock_vol3,
        mock_vol4,
        mock_vol5,
        mock_vol6,
        mock_vol7
    ]
    cls = _EbsService(21, 43)
    cls.conn = mock_conn
    with patch('awslimitchecker.services.ebs.logger') as mock_logger:
        with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
            mock_wrapper.return_value = return_value
            cls._find_usage_ebs()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EBS volumes"),
        call.error(
            "ERROR - unknown volume type '%s' for volume "
            "%s; not counting", 'othertype', 'vol-7')
    ]
    assert len(cls.limits['Provisioned IOPS'].get_current_usage()) == 1
    assert cls.limits[
        'Provisioned IOPS'].get_current_usage()[0].get_value() == 1000
    assert len(cls.limits['Provisioned IOPS (SSD) storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Provisioned IOPS (SSD) storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 500
    assert len(cls.limits['General Purpose (SSD) volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['General Purpose (SSD) volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 45
    assert len(cls.limits['Magnetic volume storage '
                          '(GiB)'].get_current_usage()) == 1
    assert cls.limits['Magnetic volume storage '
                      '(GiB)'].get_current_usage()[0].get_value() == 508
    assert len(cls.limits['Active volumes'].get_current_usage()) == 1
    assert cls.limits[
        'Active volumes'].get_current_usage()[0].get_value() == 7
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [call(mock_conn.get_all_volumes)]