def test_find_usage_nat_gateways(self):
    """_find_usage_nat_gateways() should count NAT gateways per-AZ,
    skip gateways in deleted/failed states, and log an error for any
    gateway whose SubnetId is missing from the subnet-to-AZ mapping.
    """
    subnets = result_fixtures.VPC.test_find_usage_nat_gateways_subnets
    response = result_fixtures.VPC.test_find_usage_nat_gateways
    mock_conn = Mock()
    mock_conn.describe_nat_gateways.return_value = response
    with patch('%s.logger' % self.pbm) as mock_logger:
        cls = _VpcService(21, 43)
        cls.conn = mock_conn
        cls._find_usage_nat_gateways(subnets)
        # two AZs have countable gateways (az2 and az3)
        assert len(
            cls.limits['NAT Gateways per AZ'].get_current_usage()) == 2
        az2 = cls.limits['NAT Gateways per AZ'].get_current_usage()[0]
        assert az2.get_value() == 3
        assert az2.resource_id == 'az2'
        az3 = cls.limits['NAT Gateways per AZ'].get_current_usage()[1]
        assert az3.get_value() == 1
        assert az3.resource_id == 'az3'
        assert mock_conn.mock_calls == [
            call.describe_nat_gateways(),
        ]
        assert mock_logger.mock_calls == [
            call.error(
                'ERROR: NAT Gateway %s in SubnetId %s, but SubnetId not '
                'found in subnet_to_az; Gateway cannot be counted!',
                'nat-124', 'subnet4'
            ),
            call.debug(
                'Skipping NAT Gateway %s in state: %s', 'nat-125',
                'deleted'
            ),
            call.debug(
                'Skipping NAT Gateway %s in state: %s', 'nat-127',
                'failed'
            )
        ]
def test_send_non_success_no_json(self):
    """When Splunk returns HTTP 200 but the body cannot be parsed as
    JSON, _send_splunk() should log the raw response text in the
    error message and raise RuntimeError.
    """
    self.config['splunk_hec_url'] = 'https://splunk.url/foo'
    self.config['splunk_hec_token'] = 'stoken'

    def se_exc(*args, **kwargs):
        # simulate an unparseable (non-JSON) response body
        raise Exception('foo')

    m_resp = Mock(spec_set=requests.models.Response)
    type(m_resp).status_code = 200
    type(m_resp).text = '{"text": "Failure"}'
    type(m_resp).headers = {'H1': 'V1'}
    m_resp.json.side_effect = se_exc
    with patch('%s.requests' % pbm, autospec=True) as mock_req:
        mock_req.post.return_value = m_resp
        with pytest.raises(RuntimeError):
            self.cls._send_splunk('{"foo": "bar"}')
        assert mock_req.mock_calls == [
            call.post('https://splunk.url/foo',
                      headers={'Authorization': 'Splunk stoken'},
                      data='{"foo": "bar"}'),
            call.post().json()
        ]
        assert self.mock_logger.mock_calls == [
            call.debug('Send to Splunk (%s): %s',
                       'https://splunk.url/foo', '{"foo": "bar"}'),
            call.debug(
                'Splunk POST got response code %s HEADERS=%s BODY: %s',
                200, {'H1': 'V1'}, '{"text": "Failure"}'),
            call.error('Splunk POST returned non-success response: %s',
                       {'text': '{"text": "Failure"}'})
        ]
def test_send_bad_status(self):
    """A non-20x status from Splunk should be logged (code, headers,
    body) as an error and cause _send_splunk() to raise RuntimeError
    without ever calling .json() on the response.
    """
    self.config['splunk_hec_url'] = 'https://splunk.url/foo'
    self.config['splunk_hec_token'] = 'stoken'
    m_resp = Mock(spec_set=requests.models.Response)
    type(m_resp).status_code = 403
    type(m_resp).text = '{"text": "Success"}'
    type(m_resp).headers = {'H1': 'V1'}
    m_resp.json.return_value = {'text': 'Success'}
    with patch('%s.requests' % pbm, autospec=True) as mock_req:
        mock_req.post.return_value = m_resp
        with pytest.raises(RuntimeError):
            self.cls._send_splunk('{"foo": "bar"}')
        assert mock_req.mock_calls == [
            call.post('https://splunk.url/foo',
                      headers={'Authorization': 'Splunk stoken'},
                      data='{"foo": "bar"}')
        ]
        assert self.mock_logger.mock_calls == [
            call.debug('Send to Splunk (%s): %s',
                       'https://splunk.url/foo', '{"foo": "bar"}'),
            call.debug(
                'Splunk POST got response code %s HEADERS=%s BODY: %s',
                403, {'H1': 'V1'}, '{"text": "Success"}'),
            call.error(
                'Splunk POST returned non-20x response: %s HEADERS=%s '
                'BODY: %s', 403, {'H1': 'V1'}, '{"text": "Success"}')
        ]
def test_show_cloudwatch_logs_none(self, capsys):
    """With zero log streams returned, show_cloudwatch_logs() should
    print nothing and never call _show_log_stream.
    """
    resp = {
        'logStreams': []
    }
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.client' % pbm, autospec=True) as mock_conn:
            with patch('%s._show_log_stream' % pb, autospec=True) as sls:
                mock_conn.return_value.describe_log_streams.return_value = \
                    resp
                # would be consumed if any stream were shown
                sls.side_effect = [1, 10]
                self.cls.show_cloudwatch_logs(5)
    out, err = capsys.readouterr()
    assert err == ''
    assert out == ''
    assert mock_conn.mock_calls == [
        call('logs'),
        call().describe_log_streams(descending=True, limit=5,
                                    logGroupName='/aws/lambda/myfname',
                                    orderBy='LastEventTime')
    ]
    assert sls.mock_calls == []
    assert mock_logger.mock_calls == [
        call.debug('Log Group Name: %s', '/aws/lambda/myfname'),
        call.debug('Connecting to AWS Logs API'),
        call.debug('Getting log streams'),
        call.debug('Found %d log streams', 0)
    ]
def test_show_one_queue_empty(self, capsys):
    """_show_one_queue() on an empty queue should print the 'appears
    empty' message and never attempt to delete any message, even when
    delete=True.
    """
    conn = Mock()
    conn.receive_message.return_value = {}
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._url_for_queue' % pb, autospec=True) as mock_url:
            mock_url.return_value = 'myurl'
            with patch('%s._delete_msg' % pb, autospec=True) as mock_del:
                self.cls._show_one_queue(conn, 'foo', 1, delete=True)
    out, err = capsys.readouterr()
    assert err == ''
    expected_out = "=> Queue 'foo' appears empty.\n"
    assert out == expected_out
    assert mock_del.mock_calls == []
    assert conn.mock_calls == [
        call.receive_message(
            QueueUrl='myurl',
            AttributeNames=['All'],
            MessageAttributeNames=['All'],
            MaxNumberOfMessages=1,
            WaitTimeSeconds=20
        )
    ]
    assert mock_url.mock_calls == [
        call(self.cls, conn, 'foo')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Queue '%s' url: %s", 'foo', 'myurl'),
        # NOTE(review): missing space in "queue'%s'" presumably matches
        # the implementation's log string — fix both together if ever
        # corrected
        call.warning("Receiving %d messages from queue'%s'; this may "
                     "take up to 20 seconds.", 1, 'foo'),
        call.debug('received no messages')
    ]
def test_send_get(self):
    """When use_get is set, send() must issue a GET (never a POST)
    with the serialized event payload, and log the request and the
    response status code.
    """
    when = datetime(2015, 2, 13, 1, 2, 3, 123456)
    payload = {
        'timestamp': 1423807323,
        'datetime_iso8601': '2015-02-13T01:02:03',
        'pin_num': 2,
        'pin_name': 'pin2',
        'state': 0,
        'state_name': 'state0name',
    }
    self.cls.use_get = True
    with patch('%s.requests.post' % pbm) as m_post, \
            patch('%s.requests.get' % pbm) as m_get, \
            patch('%s.logger' % pbm) as m_logger:
        type(m_post.return_value).status_code = 200
        type(m_get.return_value).status_code = 200
        self.cls.send(when, 2, 0, 'pin2', 'state0name')
    # POST must be untouched; GET carries the payload
    assert m_post.mock_calls == []
    assert m_get.mock_calls == [
        call('myurl', data=payload, timeout=10)
    ]
    assert m_logger.mock_calls == [
        call.debug('GETing %s with: %s', 'myurl', payload),
        call.debug('Request received status code %s', 200),
    ]
def test_id_string(self):
    """id_string should try id_methods in order: a method returning
    None is skipped silently, a method raising is logged and skipped,
    and the first method returning a value wins.
    """
    self.cls.id_methods = [
        'raspberrypi_cpu', 'random_fallback', 'uuid_getnode'
    ]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch.multiple(
            pb,
            autospec=True,
            uuid_getnode=DEFAULT,
            random_fallback=DEFAULT,
            raspberrypi_cpu=DEFAULT,
        ) as mocks:
            mocks['raspberrypi_cpu'].return_value = None
            mocks['random_fallback'].side_effect = self.se_exc
            mocks['uuid_getnode'].return_value = 'uuidgetnode'
            res = self.cls.id_string
    assert res == 'uuidgetnode'
    assert mock_logger.mock_calls == [
        call.debug(
            'Exception encountered when trying to determine system '
            'ID via method %s', 'random_fallback', exc_info=1),
        call.debug('Determined SystemID via method %s', 'uuid_getnode'),
        call.debug('Host ID: %s', 'uuidgetnode')
    ]
def test_mode_int(self): self.cls.refresh_mode = 120 # 2 minutes check_dt = datetime(2016, 12, 16, hour=10, minute=30, second=12, tzinfo=utc) with patch('%s._get_check_result' % pb, autospec=True) as mock_gcr: with patch('%s._can_refresh_check' % pb, autospec=True) as mock_crc: with patch('%s.logger' % pbm, autospec=True) as mock_logger: with patch('%s._poll_for_refresh' % pb, autospec=True) as mock_pfr: mock_gcr.return_value = ({'mock': 'gcr'}, check_dt) mock_pfr.return_value = {'mock': 'pfr'} mock_crc.return_value = True res = self.cls._get_refreshed_check_result('abc123') assert res == {'mock': 'pfr'} assert mock_gcr.mock_calls == [call(self.cls, 'abc123')] assert mock_crc.mock_calls == [call(self.cls, 'abc123')] assert mock_pfr.mock_calls == [call(self.cls, 'abc123')] assert mock_logger.mock_calls == [ call.debug('Handling refresh of check: %s', 'abc123'), call.debug( 'ta_refresh_mode older; check last refresh: %s; ' 'threshold=%d seconds', check_dt, 120), call.info('Refreshing Trusted Advisor check: %s', 'abc123') ]
def test_run(self):
    """run() should loop read_and_send / sleep(60) until an exception
    propagates out of read_and_send; here the 4th call raises.
    """
    def fake_ras(klass):
        # succeed three times, then raise to break the loop
        if m_ras.call_count < 4:
            return None
        raise RuntimeError()

    with patch('%s.read_and_send' % pb, autospec=True) as m_ras, \
            patch('%s.sleep' % pbm, autospec=True) as m_sleep, \
            patch('%s.logger' % pbm, autospec=True) as m_logger:
        m_ras.side_effect = fake_ras
        with pytest.raises(RuntimeError):
            self.cls.run()
    # four read attempts, three sleeps in between
    assert m_ras.mock_calls == [call(self.cls)] * 4
    assert m_sleep.mock_calls == [call(60.0)] * 3
    assert m_logger.mock_calls == [
        call.info('Running sensor daemon loop...')
    ] + [call.debug('Sleeping %ss', 60.0)] * 3
def test_update(self):
    """A valid sensor-update POST should return a JSON success body
    listing the updated IDs, set response code 201, and log both the
    incoming request and the update_sensor() result.
    """
    req_data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {
                'type': 's1type',
                'value': 12.345,
                'alias': 's1alias',
                'extra': 'extraS1'
            }
        }
    }
    req_json = json.dumps(req_data)
    mock_req = MagicMock(spec_set=Request)
    type(mock_req).responseHeaders = Mock()
    mock_req.content.getvalue.return_value = req_json
    type(mock_req).client = Mock(host='myhost')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._parse_json_request' % pb) as mock_parse:
            with patch('%s.update_sensor' % pbm,
                       autospec=True) as mock_upd:
                mock_upd.return_value = 'myid'
                mock_parse.return_value = req_data
                res = self.cls.update(None, mock_req)
    assert res.result == '{"ids": ["myid"], "status": "ok"}'
    assert mock_parse.mock_calls == [call(mock_req)]
    assert mock_req.mock_calls == [call.setResponseCode(201)]
    assert mock_logger.mock_calls == [
        call.debug(
            'Received sensor update request from %s with content: %s',
            'myhost', req_data
        ),
        call.debug('update_sensor() return value: %s', 'myid')
    ]
def test_find_usage_spot_instances(self):
    """Spot instance request usage: 'active' and 'open' requests are
    counted toward the limit; 'closed' and 'failed' are skipped.
    """
    data = fixtures.test_find_usage_spot_instances
    mock_conn = Mock()
    mock_client_conn = Mock()
    mock_client_conn.describe_spot_instance_requests.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.conn = mock_client_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_spot_instances()
    # only the client (not the resource) connection should be used
    assert mock_conn.mock_calls == []
    assert mock_client_conn.mock_calls == [
        call.describe_spot_instance_requests()
    ]
    lim = cls.limits['Max spot instance requests per region']
    usage = lim.get_current_usage()
    assert len(usage) == 1
    assert usage[0].get_value() == 2
    assert mock_logger.mock_calls == [
        call.debug('Getting spot instance request usage'),
        call.debug('NOT counting spot instance request %s state=%s',
                   'reqID1', 'closed'),
        call.debug('Counting spot instance request %s state=%s',
                   'reqID2', 'active'),
        call.debug('Counting spot instance request %s state=%s',
                   'reqID3', 'open'),
        call.debug('NOT counting spot instance request %s state=%s',
                   'reqID4', 'failed')
    ]
def test_get_api_id_tf(self):
    """get_api_id() should return the rest_api_id from the Terraform
    outputs when available, without ever falling back to AWSInfo.
    """
    conf = Mock()
    args = Mock(tf_path='tfpath')
    with patch.multiple(
        pbm,
        autospec=True,
        logger=DEFAULT,
        TerraformRunner=DEFAULT,
        AWSInfo=DEFAULT
    ) as mocks:
        mocks['TerraformRunner'].return_value._get_outputs.return_value = {
            'base_url': 'mytfbase',
            'rest_api_id': 'myid'
        }
        res = get_api_id(conf, args)
    assert res == 'myid'
    assert mocks['TerraformRunner'].mock_calls == [
        call(conf, 'tfpath'),
        call()._get_outputs()
    ]
    # AWS fallback must not be consulted on the happy path
    assert mocks['AWSInfo'].mock_calls == []
    assert mocks['logger'].mock_calls == [
        call.debug('Trying to get Terraform rest_api_id output'),
        call.debug('Terraform rest_api_id output: \'%s\'', 'myid')
    ]
def test_get_api_id_aws(self):
    """If reading the Terraform outputs raises, get_api_id() should
    log at info level (with traceback) and fall back to querying AWS
    via AWSInfo.
    """
    def se_exc(*args, **kwargs):
        raise Exception()

    conf = Mock()
    args = Mock(tf_path='tfpath')
    with patch.multiple(
        pbm,
        autospec=True,
        logger=DEFAULT,
        TerraformRunner=DEFAULT,
        AWSInfo=DEFAULT
    ) as mocks:
        mocks['TerraformRunner'].return_value._get_outputs.side_effect = \
            se_exc
        mocks['AWSInfo'].return_value.get_api_id.return_value = 'myaid'
        res = get_api_id(conf, args)
    assert res == 'myaid'
    assert mocks['TerraformRunner'].mock_calls == [
        call(conf, 'tfpath'),
        call()._get_outputs()
    ]
    assert mocks['AWSInfo'].mock_calls == [
        call(conf),
        call().get_api_id()
    ]
    assert mocks['logger'].mock_calls == [
        call.debug('Trying to get Terraform rest_api_id output'),
        call.info('Unable to find API rest_api_id from Terraform state; '
                  'querying AWS.', exc_info=1),
        call.debug('AWS API ID: \'%s\'', 'myaid')
    ]
def test_register_callbacks(self):
    """register_callbacks() wires ON and OFF handlers for each of the
    four pins, logs each registration, and records the initial pin
    states in current_values.
    """
    listener = Mock(spec_set=InputEventListener)
    self.cls.listener = listener
    with patch('%s.logger' % pbm) as m_logger:
        self.cls.register_callbacks()
    # build the expected call sequences pin-by-pin
    expected_log = [call.debug("registering callbacks")]
    expected_reg = []
    for pin in range(4):
        expected_log.append(
            call.debug('registering callback for %s ON', pin))
        expected_log.append(
            call.debug('registering callback for %s OFF', pin))
        expected_reg.append(
            call.register(pin, IODIR_ON, self.cls.handle_input_on))
        expected_reg.append(
            call.register(pin, IODIR_OFF, self.cls.handle_input_off))
    expected_log.append(call.debug('done registering callbacks'))
    expected_log.append(
        call.info('Initial pin states: %s', [10, 11, 12, 13]))
    assert m_logger.mock_calls == expected_log
    assert listener.mock_calls == expected_reg
    assert self.cls.current_values == [10, 11, 12, 13]
def test_read_and_send_bad_status_code(self):
    """A non-success response from the Engine should be logged as an
    error (status code and body), not raised.
    """
    s1 = Mock(spec_set=BaseSensor)
    s1.read.return_value = {
        'sensor1': {'data': 's1data'},
        'sensor2': {'data': 's2data'},
    }
    self.cls.sensors = [s1]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.requests.post' % pbm, autospec=True) as mock_post:
            mock_post.return_value = Mock(status_code=404, text='foo')
            self.cls.read_and_send()
    url = 'http://foo.bar.baz:1234/v1/sensors/update'
    data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {'data': 's1data'},
            'sensor2': {'data': 's2data'}
        }
    }
    assert mock_post.mock_calls == [
        call(url, json=data)
    ]
    assert mock_logger.mock_calls == [
        call.debug('Reading sensors'),
        call.debug('POSTing sensor data to %s: %s', url, data),
        call.error('Error POSTing sensor data; got status code %s: %s',
                   404, 'foo')
    ]
def test_read_and_send_exception(self):
    """An exception raised by requests.post should be caught and
    logged via logger.exception, never propagated to the caller.
    """
    def se_exc(*args, **kwargs):
        raise Exception()

    s1 = Mock(spec_set=BaseSensor)
    s1.read.return_value = {
        'sensor1': {'data': 's1data'},
        'sensor2': {'data': 's2data'},
    }
    self.cls.sensors = [s1]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.requests.post' % pbm, autospec=True) as mock_post:
            mock_post.side_effect = se_exc
            self.cls.read_and_send()
    url = 'http://foo.bar.baz:1234/v1/sensors/update'
    data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {'data': 's1data'},
            'sensor2': {'data': 's2data'}
        }
    }
    assert mock_post.mock_calls == [
        call(url, json=data)
    ]
    assert mock_logger.mock_calls == [
        call.debug('Reading sensors'),
        call.debug('POSTing sensor data to %s: %s', url, data),
        call.exception('Exception caught when trying to POST data to '
                       'Engine; will try again at next interval.')
    ]
def test_update(self):
    """A valid sensor-update POST should return a JSON success body
    listing the updated IDs, set response code 201, and log both the
    incoming request and the update_sensor() result.
    """
    req_data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {
                'type': 's1type',
                'value': 12.345,
                'alias': 's1alias',
                'extra': 'extraS1'
            }
        }
    }
    req_json = json.dumps(req_data)
    mock_req = MagicMock(spec_set=Request)
    type(mock_req).responseHeaders = Mock()
    mock_req.content.getvalue.return_value = req_json
    type(mock_req).client = Mock(host='myhost')
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._parse_json_request' % pb) as mock_parse:
            with patch('%s.update_sensor' % pbm,
                       autospec=True) as mock_upd:
                mock_upd.return_value = 'myid'
                mock_parse.return_value = req_data
                res = self.cls.update(None, mock_req)
    assert res.result == '{"ids": ["myid"], "status": "ok"}'
    assert mock_parse.mock_calls == [call(mock_req)]
    assert mock_req.mock_calls == [call.setResponseCode(201)]
    assert mock_logger.mock_calls == [
        call.debug(
            'Received sensor update request from %s with content: %s',
            'myhost', req_data),
        call.debug('update_sensor() return value: %s', 'myid')
    ]
def test_run(self):
    """run() should loop read_and_send / sleep(60) until an exception
    propagates out of read_and_send; here the 4th call raises.
    """
    def fake_ras(klass):
        # succeed three times, then raise to break the loop
        if m_ras.call_count < 4:
            return None
        raise RuntimeError()

    with patch('%s.read_and_send' % pb, autospec=True) as m_ras, \
            patch('%s.sleep' % pbm, autospec=True) as m_sleep, \
            patch('%s.logger' % pbm, autospec=True) as m_logger:
        m_ras.side_effect = fake_ras
        with pytest.raises(RuntimeError):
            self.cls.run()
    # four read attempts, three sleeps in between
    assert m_ras.mock_calls == [call(self.cls)] * 4
    assert m_sleep.mock_calls == [call(60.0)] * 3
    assert m_logger.mock_calls == [
        call.info('Running sensor daemon loop...')
    ] + [call.debug('Sleeping %ss', 60.0)] * 3
def test_setup_mongodb(self):
    """setup_mongodb() should connect with short (5s) timeouts,
    perform a journaled test upsert into the 'dbtest' collection, and
    close the connection, logging each step.
    """
    with patch('%s.MongoClient' % pbm, autospec=True) as mock_client:
        with patch('%s.logger' % pbm, autospec=True) as mock_logger:
            setup_mongodb('h', 12)
    assert mock_client.mock_calls == [
        call('h', 12, connect=True, connectTimeoutMS=5000,
             serverSelectionTimeoutMS=5000, socketTimeoutMS=5000,
             waitQueueTimeoutMS=5000),
        call().get_database(MONGO_DB_NAME),
        call().get_database().get_collection('dbtest'),
        call().get_database().get_collection().update(
            {'_id': 'setup_mongodb'},
            {'dt': FakeDatetime(2015, 1, 10, 12, 13, 14),
             '_id': 'setup_mongodb'
             },
            j=True, upsert=True, w=1
        ),
        call().close()
    ]
    assert mock_logger.mock_calls == [
        call.debug('Connecting to MongoDB via pymongo at %s:%s',
                   'h', 12),
        call.info('Connected to MongoDB via pymongo at %s:%s',
                  'h', 12),
        call.debug('Trying a DB upsert'),
        call.debug('MongoDB write completed successfully.')
    ]
def test_write_zip(self):
    """_write_zip() should create the zip at the given path and write
    the function source as a single member with a fixed timestamp and
    file mode.
    """
    with patch('%s.zipfile.ZipFile' % pbm, autospec=True) as mock_zf:
        with patch('%s.logger' % pbm, autospec=True) as mock_logger:
            self.cls._write_zip('myfsrc', 'mypath.zip')
    # the only way I can find to capture attributes being set on the
    # ZipInfo is to not mock it, but use a real ZipInfo object.
    # Unfortunately, that makes asserting on calls a bit more
    # difficult...
    assert len(mock_zf.mock_calls) == 4
    assert mock_zf.mock_calls[0] == call('mypath.zip', 'w')
    assert mock_zf.mock_calls[1] == call().__enter__()
    assert mock_zf.mock_calls[3] == call().__exit__(None, None, None)
    # ok, now handle the second call, which should have the ZipInfo
    # as its first argument...
    # test that it's the right chained method call
    assert mock_zf.mock_calls[2][0] == '().__enter__().writestr'
    # test its arguments
    arg_tup = mock_zf.mock_calls[2][1]
    assert isinstance(arg_tup[0], ZipInfo)
    assert arg_tup[0].filename == 'webhook2lambda2sqs_func.py'
    assert arg_tup[0].date_time == (2016, 7, 1, 2, 3, 4)
    # NOTE(review): 0x0755 is hexadecimal (1877), not the octal file
    # mode 0o755 (493); presumably this mirrors the constant used in
    # _write_zip itself — confirm against the implementation
    assert arg_tup[0].external_attr == 0x0755 << 16
    assert arg_tup[1] == 'myfsrc'
    assert mock_logger.mock_calls == [
        call.debug('setting zipinfo date to: %s', (2016, 7, 1, 2, 3, 4)),
        call.debug('setting zipinfo file mode to: %s', (0x0755 << 16)),
        call.debug('writing zip file at: %s', 'mypath.zip')
    ]
def test_generate(self):
    """generate() should write the lambda function source, the zip
    bundle, and the terraform JSON config, logging each step.
    """
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s._get_config' % pb, autospec=True) as mock_get:
            with patch('%s.open' % pbm, mock_open(),
                       create=True) as m_open:
                with patch('%s._write_zip' % pb,
                           autospec=True) as mock_zip:
                    mock_get.return_value = 'myjson'
                    self.cls.generate('myfunc')
    assert mock_get.mock_calls == [call(self.cls, 'myfunc')]
    # two files written: function source, then TF JSON
    assert m_open.mock_calls == [
        call('./webhook2lambda2sqs_func.py', 'w'),
        call().__enter__(),
        call().write('myfunc'),
        call().__exit__(None, None, None),
        call('./webhook2lambda2sqs.tf.json', 'w'),
        call().__enter__(),
        call().write('myjson'),
        call().__exit__(None, None, None)
    ]
    assert mock_zip.mock_calls == [
        call(self.cls, 'myfunc', './webhook2lambda2sqs_func.zip')
    ]
    assert mock_logger.mock_calls == [
        call.warning('Writing lambda function source to: '
                     './webhook2lambda2sqs_func.py'),
        call.debug('lambda function written'),
        call.warning('Writing lambda function source zip file to: '
                     './webhook2lambda2sqs_func.zip'),
        call.debug('lambda zip written'),
        call.warning('Writing terraform configuration JSON to: '
                     './webhook2lambda2sqs.tf.json'),
        call.debug('terraform configuration written'),
        call.warning('Completed writing lambda function and TF config.')
    ]
def test_discover_owfs(self):
    """_discover_owfs() should skip nonexistent candidate paths and
    paths lacking the OWFS settings tree, returning the first
    mountpoint that has both.
    """
    self.cls.owfs_paths = ['/foo', '/bar', '/baz']

    def se_exists(path):
        # /baz and everything under it exists; /bar exists but its
        # OWFS settings path does not; /foo does not exist at all
        if path.startswith('/baz'):
            return True
        if path == '/bar':
            return True
        return False

    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.os.path.exists' % pbm, autospec=True) as mock_ex:
            mock_ex.side_effect = se_exists
            res = self.cls._discover_owfs()
    assert res == '/baz'
    assert mock_ex.mock_calls == [
        call('/foo'),
        call('/bar'),
        call('/bar/settings/units/temperature_scale'),
        call('/baz'),
        call('/baz/settings/units/temperature_scale')
    ]
    assert mock_logger.mock_calls == [
        call.debug('Attempting to find OWFS path/mountpoint from list '
                   'of common options: %s', ['/foo', '/bar', '/baz']),
        call.debug('Path %s does not exist; skipping', '/foo'),
        call.debug('Path %s exists but does not appear to have OWFS '
                   'mounted', '/bar'),
        call.info('Found OWFS mounted at: %s', '/baz')
    ]
def test_main(self):
    """ test main function """
    # main() should: parse CLI args (dropping argv[0]), skip
    # show_config, connect to MongoDB using configured host/port,
    # build the APIServer/Site pair, and run the reactor on the
    # configured port without changing the log level.
    def se_config_get(name):
        x = {
            'api_port': 8088,
            'verbose': 0,
            'mongo_host': 'mhost',
            'mongo_port': 1234
        }
        return x.get(name, None)

    mock_args = Mock(verbose=0, show_config=False)
    with patch('%s.logger' % pbm, autospec=True) as mocklogger:
        with patch.multiple(
            pbm,
            autospec=True,
            APIServer=DEFAULT,
            Site=DEFAULT,
            reactor=DEFAULT,
            PythonLoggingObserver=DEFAULT,
            parse_args=DEFAULT,
            show_config=DEFAULT,
            Config=DEFAULT,
            set_log_info=DEFAULT,
            set_log_debug=DEFAULT,
            connect_mongodb=DEFAULT,
        ) as mocks:
            mocks['Config'].return_value.get.side_effect = se_config_get
            mocks['parse_args'].return_value = mock_args
            mocks['connect_mongodb'].return_value = self.mock_mongo
            with patch.object(sys, 'argv', ['bad', 'foo', 'bar']):
                main()
    assert mocks['show_config'].mock_calls == []
    assert mocks['parse_args'].mock_calls == [call(['foo', 'bar'])]
    assert mocks['connect_mongodb'].mock_calls == [call('mhost', 1234)]
    assert mocks['APIServer'].mock_calls == [
        call(self.mock_mongo),
        call().app.resource()
    ]
    site_app_res = mocks[
        'APIServer'].return_value.app.resource.return_value
    assert mocks['Site'].mock_calls == [call(site_app_res)]
    assert mocks['reactor'].mock_calls == [
        call.listenTCP(8088, mocks['Site'].return_value),
        call.run()
    ]
    assert mocks['PythonLoggingObserver'].mock_calls == [
        call(),
        call().start()
    ]
    assert mocklogger.mock_calls == [
        call.debug('instantiating apiserver'),
        call.debug("reactor.listenTCP"),
        call.debug("reactor.run() - listening on port %d", 8088),
        call.debug("reactor.run() returned")
    ]
    # verbose=0 means neither log-level helper is invoked
    assert mocks['set_log_info'].mock_calls == []
    assert mocks['set_log_debug'].mock_calls == []
def test_read_and_send(self):
    """A sensor whose read() raises should be logged (via
    logger.exception) and skipped; the data from the remaining
    sensors is merged and POSTed to the Engine.
    """
    def se_exc():
        raise Exception()

    s1 = Mock(spec_set=BaseSensor)
    s1.read.return_value = {
        'sensor1': {
            'data': 's1data'
        },
        'sensor2': {
            'data': 's2data'
        },
    }
    s2 = Mock(spec_set=BaseSensor)
    s2.read.side_effect = se_exc
    s3 = Mock(spec_set=BaseSensor)
    s3.read.return_value = {
        'sensor31': {
            'data': 's31data'
        },
        'sensor32': {
            'data': 's32data'
        },
    }
    self.cls.sensors = [s1, s2, s3]
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.requests.post' % pbm, autospec=True) as mock_post:
            mock_post.return_value = Mock(status_code=201)
            self.cls.read_and_send()
    url = 'http://foo.bar.baz:1234/v1/sensors/update'
    data = {
        'host_id': 'myhostid',
        'sensors': {
            'sensor1': {
                'data': 's1data'
            },
            'sensor2': {
                'data': 's2data'
            },
            'sensor31': {
                'data': 's31data'
            },
            'sensor32': {
                'data': 's32data'
            },
        }
    }
    assert mock_post.mock_calls == [call(url, json=data)]
    assert mock_logger.mock_calls == [
        call.debug('Reading sensors'),
        call.exception('Exception reading sensor %s', 'BaseSensor'),
        call.debug('POSTing sensor data to %s: %s', url, data),
        call.info('POSTed sensor data to Engine')
    ]
def test_run_zero_artifact(self, logger_mock):
    """ArtifactExtractor - Test run method extract zero artifact"""
    self._artifact_extractor.run(generate_categorized_records())
    expected_calls = [
        call.debug('Extracting artifacts from %d %s logs', 2,
                   'log_type_01_sub_type_01'),
        call.debug('Extracted %d artifact(s)', 0),
    ]
    logger_mock.assert_has_calls(expected_calls)
    # nothing was extracted, so the artifact list stays empty
    assert_equal(self._artifact_extractor._artifacts, list())
def test_none(self):
    """_poll_for_refresh() with no refresh_timeout should poll the
    refresh status until it leaves the in-progress states, warn about
    the processing->none transition (complete or AWS-side timeout),
    then return the check result.
    """
    self.cls.refresh_timeout = None
    check_dt = datetime(2016, 12, 16, hour=10, minute=30, second=12,
                        tzinfo=utc)
    now_dt = datetime(2016, 12, 16, hour=11, minute=30, second=12,
                      tzinfo=utc)
    # status sequence seen across the four polls
    statuses = [
        {'statuses': [{'status': 'none'}]},
        {'statuses': [{'status': 'enqueued'}]},
        {'statuses': [{'status': 'processing'}]},
        {'statuses': [{'status': 'none'}]}
    ]
    m_s = self.mock_conn.describe_trusted_advisor_check_refresh_statuses
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.sleep' % pbm, autospec=True) as mock_sleep:
            with patch('%s._get_check_result' % pb,
                       autospec=True) as gcr:
                with patch('%s.datetime_now' % pbm) as mock_dt_now:
                    mock_dt_now.return_value = now_dt
                    m_s.side_effect = statuses
                    gcr.return_value = ({'foo': 'bar'}, check_dt)
                    res = self.cls._poll_for_refresh('abc123')
    assert res == {'foo': 'bar'}
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123']),
        call.describe_trusted_advisor_check_refresh_statuses(
            checkIds=['abc123'])
    ]
    assert gcr.mock_calls == [call(self.cls, 'abc123')]
    assert mock_sleep.mock_calls == [
        call(30), call(30), call(30)
    ]
    assert mock_dt_now.mock_calls == [
        call(), call(), call(), call(), call()
    ]
    assert mock_logger.mock_calls == [
        call.warning('Polling for TA check %s refresh...', 'abc123'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'none'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'enqueued'),
        call.debug('Checking refresh status'),
        call.info('Refresh status: %s; sleeping 30s', 'processing'),
        call.debug('Checking refresh status'),
        call.warning('Trusted Advisor check refresh status went '
                     'from "%s" to "%s"; refresh is either complete '
                     'or timed out on AWS side. Continuing',
                     'processing', 'none'),
        call.info('Done polling for check refresh'),
        call.debug('Check shows last refresh time of: %s', check_dt)
    ]
def test_find_usage_spot_fleets_paginated(self):
    """If describe_spot_fleet_requests() returns a NextToken, usage is
    still computed from the single page, but an error about missing
    pagination support is logged.
    """
    data = deepcopy(fixtures.test_find_usage_spot_fleets)
    data['NextToken'] = 'string'
    mock_conn = Mock()
    mock_client_conn = Mock()
    mock_client_conn.describe_spot_fleet_requests.return_value = data
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    cls.conn = mock_client_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_spot_fleets()
    assert mock_conn.mock_calls == []
    assert mock_client_conn.mock_calls == [
        call.describe_spot_fleet_requests()
    ]
    total = cls.limits['Max active spot fleets per '
                       'region'].get_current_usage()
    assert len(total) == 1
    assert total[0].get_value() == 2
    totalcap = cls.limits['Max target capacity for all spot fleets '
                          'in region'].get_current_usage()
    assert len(totalcap) == 1
    assert totalcap[0].get_value() == 44
    cap_per_fleet = cls.limits['Max target capacity per spot '
                               'fleet'].get_current_usage()
    assert len(cap_per_fleet) == 2
    assert cap_per_fleet[0].get_value() == 11
    assert cap_per_fleet[0].resource_id == 'req2'
    assert cap_per_fleet[1].get_value() == 33
    assert cap_per_fleet[1].resource_id == 'req4'
    launch_specs = cls.limits['Max launch specifications '
                              'per spot fleet'].get_current_usage()
    assert len(launch_specs) == 2
    assert launch_specs[0].get_value() == 3
    assert launch_specs[0].resource_id == 'req2'
    assert launch_specs[1].get_value() == 1
    assert launch_specs[1].resource_id == 'req4'
    assert mock_logger.mock_calls == [
        call.debug('Getting spot fleet request usage'),
        call.error('Error: describe_spot_fleet_requests() response '
                   'includes pagination token, but pagination not '
                   'configured in awslimitchecker.'),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req1', 'failed'),
        call.debug('Skipping spot fleet request %s in state %s',
                   'req3', 'modifying')
    ]
def test_handle_files(self):
    """handle_files() should parse only well-formed pinevent
    filenames, handle them in sorted order, unlink files that were
    handled successfully, and log (but keep) files whose handler
    raised.
    """
    flist = [
        'foobar',
        'pinevent_1420863332.123456_pin2_state1',
        'pinevent_csds_pin3_state1',
        'pinevent_1420863326.123456_pin3_state0',
        'pinevent_1420863326.123456_pin2_state1',
        'xsfjef_fhejfec_dfhe',
        'pinevent_1420863326.456789_pin3_state2',
    ]
    ex = Exception('foo')

    def se_handle(fname, evt_datetime, pin, state):
        # fail only for the newest event file
        if fname == 'pinevent_1420863332.123456_pin2_state1':
            raise ex

    type(self.config).QUEUE_PATH = '/foo/bar'
    with patch('%s.logger' % pbm) as mock_logger:
        with patch('%s.os.listdir' % pbm) as mock_listdir:
            with patch('%s.handle_one_file' % pb) as mock_handle:
                with patch('%s.os.unlink' % pbm) as mock_unlink:
                    mock_listdir.return_value = flist
                    mock_handle.side_effect = se_handle
                    self.cls.handle_files()
    assert mock_logger.mock_calls == [
        call.info("Found %d new events", 3),
        call.debug('File handled; removing: %s',
                   'pinevent_1420863326.123456_pin2_state1'),
        call.debug('File handled; removing: %s',
                   'pinevent_1420863326.123456_pin3_state0'),
        # NOTE(review): 'Execption' typo presumably matches the
        # implementation's log message — fix both together if ever
        # corrected
        call.exception('Execption while handling event file %s',
                       'pinevent_1420863332.123456_pin2_state1'),
    ]
    assert mock_listdir.mock_calls == [call('/foo/bar')]
    assert mock_handle.mock_calls == [
        call('pinevent_1420863326.123456_pin2_state1',
             datetime(2015, 1, 9, 23, 15, 26, 123456), 2, 1),
        call('pinevent_1420863326.123456_pin3_state0',
             datetime(2015, 1, 9, 23, 15, 26, 123456), 3, 0),
        call('pinevent_1420863332.123456_pin2_state1',
             datetime(2015, 1, 9, 23, 15, 32, 123456), 2, 1),
    ]
    # the file whose handler raised must NOT be unlinked
    assert mock_unlink.mock_calls == [
        call('/foo/bar/pinevent_1420863326.123456_pin2_state1'),
        call('/foo/bar/pinevent_1420863326.123456_pin3_state0')
    ]
def test_find_usage_apis_stages_now_paginated(self):
    """If get_stages() returns keys beyond those in the boto3 docs
    (here an extra 'position'), _find_usage_apis() should warn about
    the unexpected key set.
    """
    mock_conn = Mock()
    res = result_fixtures.ApiGateway.get_rest_apis
    mock_paginator = Mock()
    mock_paginator.paginate.return_value = res

    def se_res_paginate(restApiId=None):
        return result_fixtures.ApiGateway.get_resources[restApiId]

    mock_res_paginator = Mock()
    mock_res_paginator.paginate.side_effect = se_res_paginate

    def se_get_paginator(api_name):
        # dispatch the right paginator per API operation
        if api_name == 'get_rest_apis':
            return mock_paginator
        elif api_name == 'get_resources':
            return mock_res_paginator

    def se_paginate_dict(*args, **kwargs):
        if args[0] == mock_conn.get_documentation_parts:
            return result_fixtures.ApiGateway.doc_parts[
                kwargs['restApiId']]
        if args[0] == mock_conn.get_authorizers:
            return result_fixtures.ApiGateway.authorizers[
                kwargs['restApiId']
            ]

    def se_get_stages(restApiId=None):
        # inject an extra, undocumented key into the response
        r = deepcopy(result_fixtures.ApiGateway.stages[restApiId])
        r['position'] = 'foo'
        return r

    mock_conn.get_paginator.side_effect = se_get_paginator
    mock_conn.get_stages.side_effect = se_get_stages
    cls = _ApigatewayService(21, 43, {}, None)
    cls.conn = mock_conn
    with patch('%s.paginate_dict' % pbm, autospec=True) as mock_pd:
        with patch('%s.logger' % pbm) as mock_logger:
            mock_pd.side_effect = se_paginate_dict
            cls._find_usage_apis()
    assert mock_logger.mock_calls == [
        call.debug('Finding usage for APIs'),
        call.debug('Found %d APIs', 5),
        call.debug('Finding usage for per-API limits'),
        call.warning(
            'APIGateway get_stages returned more keys than present in '
            'boto3 docs: %s', ['item', 'position']
        )
    ]
def test_find_usage_apis_stages_now_paginated(self):
    """If get_stages() returns keys beyond those in the boto3 docs
    (here an extra 'position'), _find_usage_apis() should warn about
    the unexpected key set.
    """
    mock_conn = Mock()
    res = result_fixtures.ApiGateway.get_rest_apis
    mock_paginator = Mock()
    mock_paginator.paginate.return_value = res

    def se_res_paginate(restApiId=None):
        return result_fixtures.ApiGateway.get_resources[restApiId]

    mock_res_paginator = Mock()
    mock_res_paginator.paginate.side_effect = se_res_paginate

    def se_get_paginator(api_name):
        # dispatch the right paginator per API operation
        if api_name == 'get_rest_apis':
            return mock_paginator
        elif api_name == 'get_resources':
            return mock_res_paginator

    def se_paginate_dict(*args, **kwargs):
        if args[0] == mock_conn.get_documentation_parts:
            return result_fixtures.ApiGateway.doc_parts[
                kwargs['restApiId']]
        if args[0] == mock_conn.get_authorizers:
            return result_fixtures.ApiGateway.authorizers[
                kwargs['restApiId']
            ]

    def se_get_stages(restApiId=None):
        # inject an extra, undocumented key into the response
        r = deepcopy(result_fixtures.ApiGateway.stages[restApiId])
        r['position'] = 'foo'
        return r

    mock_conn.get_paginator.side_effect = se_get_paginator
    mock_conn.get_stages.side_effect = se_get_stages
    cls = _ApigatewayService(21, 43)
    cls.conn = mock_conn
    with patch('%s.paginate_dict' % pbm, autospec=True) as mock_pd:
        with patch('%s.logger' % pbm) as mock_logger:
            mock_pd.side_effect = se_paginate_dict
            cls._find_usage_apis()
    assert mock_logger.mock_calls == [
        call.debug('Finding usage for APIs'),
        call.debug('Found %d APIs', 5),
        call.debug('Finding usage for per-API limits'),
        call.warning(
            'APIGateway get_stages returned more keys than present in '
            'boto3 docs: %s', ['item', 'position']
        )
    ]
def test_find_usage_instances_key_error(self):
    """
    _instance_usage() should log an error and skip counting when a
    reservation contains an instance whose type ('foobar') is not in the
    known _instance_types() list.

    NOTE(review): a near-identical test with this same name appears later
    in this chunk; if both live in one class, the later definition
    shadows this one.
    """
    mock_inst1A = Mock(spec_set=Instance)
    type(mock_inst1A).id = '1A'
    # unknown instance type — the condition under test
    type(mock_inst1A).instance_type = 'foobar'
    type(mock_inst1A).spot_instance_request_id = None
    mock_res1 = Mock(spec_set=Reservation)
    type(mock_res1).instances = [mock_inst1A]
    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [mock_res1]
    cls = _Ec2Service(21, 43)
    cls.conn = mock_conn
    cls.limits = {'Running On-Demand t2.micro instances': Mock()}
    with patch(
            '%s._instance_types' % self.pb, autospec=True) as mock_itypes:
        with patch('awslimitchecker.services.ec2.logger') as mock_logger:
            with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
                mock_wrapper.return_value = return_value
                mock_itypes.return_value = ['t2.micro']
                cls._instance_usage()
    assert mock_logger.mock_calls == [
        call.debug('Getting usage for on-demand instances'),
        call.error("ERROR - unknown instance type '%s'; not counting",
                   'foobar'),
    ]
    # the connection itself is never called; all queries go through the
    # boto_query_wrapper helper
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [call(mock_conn.get_all_reservations)]
def test_find_usage_invalid_region_503(self):
    """
    find_usage() should swallow a ClientError from GetSendQuota (here a
    503 Service Unavailable), log a warning, and leave _have_usage False
    with no usage recorded for the 'Daily sending quota' limit.
    """
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 503,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Service Unavailable',
            'Code': '503'
        }
    }
    ce = ClientError(resp, 'GetSendQuota')

    # side effect: simulate the API call failing with the 503 error
    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43, {}, None)
            cls.conn = mock_conn
            assert cls._have_usage is False
            cls.find_usage()
    assert mock_connect.mock_calls == [call()]
    # the failed call must NOT mark usage as found
    assert cls._have_usage is False
    assert mock_logger.mock_calls == [
        call.debug('Checking usage for service %s', 'SES'),
        call.warning('Skipping SES: %s', ce)
    ]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert len(cls.limits['Daily sending quota'].get_current_usage()) == 0
def test_boto3_connection_kwargs_profile(self):
    """
    When constructed with profile_name, _boto3_connection_kwargs should
    build a boto3 Session for that profile and return that session's
    credentials, without requesting STS tokens.
    """
    cls = ConnectableTester(profile_name='myprof')
    fake_creds = Mock()
    type(fake_creds).access_key = 'ak'
    type(fake_creds).secret_key = 'sk'
    type(fake_creds).token = 'tkn'
    inner_session = Mock()
    inner_session.get_credentials.return_value = fake_creds
    session = Mock()
    type(session)._session = inner_session
    with patch('%s._get_sts_token' % pb) as sts_mock, \
            patch('%s.logger' % pbm) as log_mock, \
            patch('%s.boto3.Session' % pbm) as sess_mock:
        Connectable.credentials = None
        sess_mock.return_value = session
        res = cls._boto3_connection_kwargs
    # no STS involvement for a profile-based connection
    assert sts_mock.mock_calls == []
    assert log_mock.mock_calls == [
        call.debug('Using credentials profile: %s', 'myprof')
    ]
    assert sess_mock.mock_calls == [call(profile_name='myprof')]
    assert res == {
        'region_name': None,
        'aws_access_key_id': 'ak',
        'aws_secret_access_key': 'sk',
        'aws_session_token': 'tkn'
    }
def test_boto3_connection_kwargs_sts(self):
    """
    With account_id/account_role/region set, _boto_conn_kwargs should call
    _get_sts_token() and return the STS credentials plus the configured
    region, never constructing a plain boto3 Session.
    """
    mock_creds = Mock()
    type(mock_creds).access_key = 'sts_ak'
    type(mock_creds).secret_key = 'sts_sk'
    type(mock_creds).session_token = 'sts_token'
    with patch('%s._get_sts_token' % pb) as mock_get_sts:
        with patch('%s.logger' % pbm) as mock_logger:
            with patch('%s.boto3.Session' % pbm) as mock_sess:
                with patch.dict('%s._services' % pbm, {}, clear=True):
                    cls = AwsLimitChecker(
                        account_id='123', account_role='myrole',
                        region='myregion')
                    mock_get_sts.return_value = mock_creds
                    # discard any calls made during construction; the
                    # assertions should cover only the property access
                    mock_get_sts.reset_mock()
                    mock_logger.reset_mock()
                    mock_sess.reset_mock()
                    res = cls._boto_conn_kwargs
    assert mock_get_sts.mock_calls == [call()]
    assert mock_logger.mock_calls == [
        call.debug(
            "Connecting for account %s role '%s' with STS "
            "(region: %s)", '123', 'myrole', 'myregion')
    ]
    # no plain Session is built on the STS path
    assert mock_sess.mock_calls == []
    assert res == {
        'region_name': 'myregion',
        'aws_access_key_id': 'sts_ak',
        'aws_secret_access_key': 'sts_sk',
        'aws_session_token': 'sts_token'
    }
def test_find_usage_networking_eni_sg(self):
    """
    _find_usage_networking_eni_sg() should record one usage entry per
    network interface for the 'VPC security groups per elastic network
    interface' limit, valued at each interface's security-group count.

    NOTE(review): a later test in this chunk re-defines this exact method
    name with an identical body; if both live in one class, the later
    definition shadows this one — the duplicate should be removed.
    """
    mocks = fixtures.test_find_usage_networking_eni_sg
    mock_conn = Mock()
    mock_conn.network_interfaces.all.return_value = mocks
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_networking_eni_sg()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EC2 Network Interfaces"),
    ]
    limit = cls.limits['VPC security groups per elastic network interface']
    # sort for deterministic ordering of the per-interface entries
    sorted_usage = sorted(limit.get_current_usage())
    assert len(sorted_usage) == 3
    assert sorted_usage[0].limit == limit
    assert sorted_usage[0].resource_id == 'if-1'
    assert sorted_usage[0].get_value() == 0
    assert sorted_usage[1].limit == limit
    assert sorted_usage[1].resource_id == 'if-2'
    assert sorted_usage[1].get_value() == 3
    assert sorted_usage[2].limit == limit
    assert sorted_usage[2].resource_id == 'if-3'
    assert sorted_usage[2].get_value() == 8
    assert mock_conn.mock_calls == [call.network_interfaces.all()]
def test_find_usage_networking_eips(self):
    """
    _find_usage_networking_eips() should count VPC and Classic elastic
    IPs separately, recording a single aggregate usage entry per limit.
    """
    addr_fixtures = fixtures.test_find_usage_networking_eips
    conn = Mock()
    conn.classic_addresses.all.return_value = addr_fixtures['Classic']
    conn.vpc_addresses.all.return_value = addr_fixtures['Vpc']
    svc = _Ec2Service(21, 43)
    svc.resource_conn = conn
    with patch('awslimitchecker.services.ec2.logger') as log_mock:
        svc._find_usage_networking_eips()
    assert log_mock.mock_calls == [
        call.debug("Getting usage for EC2 EIPs"),
    ]
    # each limit gets exactly one aggregate entry with no resource_id
    for lim_name, expected_count in [
        ('VPC Elastic IP addresses (EIPs)', 2),
        ('Elastic IP addresses (EIPs)', 1),
    ]:
        lim = svc.limits[lim_name]
        current = lim.get_current_usage()
        assert len(current) == 1
        assert current[0].limit == lim
        assert current[0].get_value() == expected_count
        assert current[0].resource_id is None
        assert current[0].aws_type == 'AWS::EC2::EIP'
    assert conn.mock_calls == [
        call.vpc_addresses.all(),
        call.classic_addresses.all()
    ]
def test_can_add_mandate(self, logging_mock):
    """
    Mandate.add_mandate() should persist a mandate linked to the given
    legislator and political office, convert the timestamp bounds back
    into dates, and log the addition.
    """
    office = PoliticalOfficeFactory.create()
    member = LegislatorFactory.create()
    start_day = datetime.utcnow().date()
    end_day = start_day + timedelta(days=10)
    mandate = Mandate.add_mandate(self.db, {
        'legislator_id': member.id,
        'political_office_id': office.id,
        'date_start': date_to_timestamp(start_day),
        'date_end': date_to_timestamp(end_day),
    })
    expect(mandate.legislator).to_equal(member)
    expect(mandate.political_office).to_equal(office)
    # timestamps round-trip back into plain dates on the model
    expect(mandate.date_start).to_equal(start_day)
    expect(mandate.date_end).to_equal(end_day)
    expect(logging_mock.mock_calls).to_include(
        call.debug('Added mandate: "%s"', str(mandate))
    )
def test_find_usage_instances_key_error(self):
    """
    _instance_usage() should log an error and skip counting when a
    reservation contains an instance whose type ('foobar') is not in the
    known _instance_types() list.

    NOTE(review): this re-defines a method name used earlier in this
    chunk with a near-identical body; if both live in one class, this
    definition shadows the earlier one — the duplicate should be removed.
    """
    mock_inst1A = Mock(spec_set=Instance)
    type(mock_inst1A).id = '1A'
    # unknown instance type — the condition under test
    type(mock_inst1A).instance_type = 'foobar'
    type(mock_inst1A).spot_instance_request_id = None
    mock_res1 = Mock(spec_set=Reservation)
    type(mock_res1).instances = [mock_inst1A]
    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [mock_res1]
    cls = _Ec2Service(21, 43)
    cls.conn = mock_conn
    cls.limits = {'Running On-Demand t2.micro instances': Mock()}
    with patch('%s._instance_types' % self.pb,
               autospec=True) as mock_itypes:
        with patch('awslimitchecker.services.ec2.logger') as mock_logger:
            with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
                mock_wrapper.return_value = return_value
                mock_itypes.return_value = ['t2.micro']
                cls._instance_usage()
    assert mock_logger.mock_calls == [
        call.debug('Getting usage for on-demand instances'),
        call.error("ERROR - unknown instance type '%s'; not counting",
                   'foobar'),
    ]
    # the connection itself is never called; all queries go through the
    # boto_query_wrapper helper
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [
        call(mock_conn.get_all_reservations)
    ]
def test_parser_timeout(self):
    """
    Test to verify fix for
    https://github.com/andresriancho/w3af/issues/6723
    "w3af running long time more than 24h"

    PARSER_TIMEOUT is patched down to 1 second and the only registered
    parser is a deliberately slow one; parsing must be killed, raising
    BaseFrameworkException and logging the timeout message.
    """
    modc = 'w3af.core.data.parsers.parser_cache.%s'
    modp = 'w3af.core.data.parsers.document_parser.%s'

    with patch(modc % 'om.out') as om_mock,\
         patch(modc % 'ParserCache.PARSER_TIMEOUT',
               new_callable=PropertyMock) as timeout_mock,\
         patch(modp % 'DocumentParser.PARSERS',
               new_callable=PropertyMock) as parsers_mock:

        # 1-second timeout so the test finishes quickly
        timeout_mock.return_value = 1
        parsers_mock.return_value = [DelayedParser]

        html = '<html>foo!</html>'
        http_resp = _build_http_response(html, u'text/html')

        try:
            self.dpc.get_document_parser_for(http_resp)
        except BaseFrameworkException:
            msg = '[timeout] The parser took more than %s seconds'\
                  ' to complete parsing of "%s", killed it!'

            error = msg % (ParserCache.PARSER_TIMEOUT,
                           http_resp.get_url())

            self.assertIn(call.debug(error), om_mock.mock_calls)
        else:
            # no exception means the timeout never fired: fail the test
            self.assertTrue(False)
def test_audit_plugin_timeout_threads(self):
    """
    I want to make sure that when stopit kills the real audit function,
    the threads which are called from it won't do anything strange.

    The plan is to scan something large with httpretty, with delays in
    the HTTP responses to simulate a slow network and a low
    PLUGIN_TIMEOUT to make the test quicker.
    """
    plugin_inst = self.w3afcore.plugins.get_plugin_inst('audit', 'sqli')

    url = URL(self.target_url)
    freq = FuzzableRequest(url)
    orig_response = plugin_inst.get_original_response(freq)

    mod = 'w3af.core.controllers.plugins.audit_plugin.%s'

    with patch(mod % 'om.out') as om_mock,\
         patch(mod % 'AuditPlugin.PLUGIN_TIMEOUT',
               new_callable=PropertyMock) as timeout_mock:

        # 2-second timeout so the plugin is killed quickly
        timeout_mock.return_value = 2
        plugin_inst.audit_with_copy(freq, orig_response)

        msg = '[timeout] The "%s" plugin took more than %s seconds to'\
              ' complete the analysis of "%s", killing it!'

        error = msg % (plugin_inst.get_name(),
                       plugin_inst.PLUGIN_TIMEOUT,
                       freq.get_url())

        # the timeout message must have been logged at debug level
        self.assertIn(call.debug(error), om_mock.mock_calls)
def test_find_usage_snapshots(self):
    """
    _find_usage_snapshots() (boto2 / boto_query_wrapper path) should
    count all snapshots owned by 'self' into a single 'Active snapshots'
    usage entry, never calling the connection directly.

    NOTE(review): a later test in this chunk re-defines this exact method
    name against the paginate_dict code path; if both live in one class,
    the later definition shadows this one.
    """
    mock_snap1 = Mock(spec_set=Snapshot)
    type(mock_snap1).id = 'snap-1'
    mock_snap2 = Mock(spec_set=Snapshot)
    type(mock_snap2).id = 'snap-2'
    mock_snap3 = Mock(spec_set=Snapshot)
    type(mock_snap3).id = 'snap-3'
    mock_conn = Mock(spec_set=EC2Connection)
    return_value = [
        mock_snap1,
        mock_snap2,
        mock_snap3,
    ]
    cls = _EbsService(21, 43)
    cls.conn = mock_conn
    with patch('awslimitchecker.services.ebs.logger') as mock_logger:
        with patch('%s.boto_query_wrapper' % self.pbm) as mock_wrapper:
            mock_wrapper.return_value = return_value
            cls._find_usage_snapshots()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EBS snapshots"),
    ]
    assert len(cls.limits['Active snapshots'].get_current_usage()) == 1
    # the trailing '' is an implicit-concatenation artifact of line
    # wrapping; the key is simply 'Active snapshots'
    assert cls.limits['Active snapshots'
                      ''].get_current_usage()[0].get_value() == 3
    assert mock_conn.mock_calls == []
    assert mock_wrapper.mock_calls == [
        call(mock_conn.get_all_snapshots, owner='self')
    ]
def test_init(self):
    """
    Constructor wiring: the checker built in setUp should hold one
    _AwsService instance per registered service class, construct
    TrustedAdvisor and the quotas client with the connection kwargs,
    record version info, and log the region it is connecting to.
    """
    # dict should be of _AwsService instances
    services = {'SvcFoo': self.mock_svc1, 'SvcBar': self.mock_svc2}
    assert self.cls.services == services
    # _AwsService instances should exist, but have no other calls
    assert self.mock_foo.mock_calls == [
        call(80, 99, {'region_name': None}, self.mock_quotas.return_value)
    ]
    assert self.mock_bar.mock_calls == [
        call(80, 99, {'region_name': None}, self.mock_quotas.return_value)
    ]
    assert self.mock_ta_constr.mock_calls == [
        call(services, {'region_name': None}, ta_api_region='us-east-1',
             ta_refresh_mode=None, ta_refresh_timeout=None)
    ]
    assert self.mock_svc1.mock_calls == []
    assert self.mock_svc2.mock_calls == []
    assert self.cls.ta == self.mock_ta
    # version lookup happens exactly once during construction
    assert self.mock_version.mock_calls == [call()]
    assert self.cls.vinfo == self.mock_ver_info
    assert self.mock_glv.mock_calls == []
    assert self.mock_logger.mock_calls == [
        call.debug('Connecting to region %s', None)
    ]
    assert self.cls.role_partition == 'aws'
    assert self.mock_quotas.mock_calls == [call({'region_name': None})]
def test_boto3_connection_kwargs_profile(self):
    """
    When AwsLimitChecker is constructed with profile_name,
    _boto_conn_kwargs should build a boto3 Session for that profile and
    return its credentials with no region and no STS involvement.

    NOTE(review): an earlier test in this chunk shares this method name
    (exercising Connectable._boto3_connection_kwargs); if both live in
    one class, this definition shadows it — confirm.
    """
    m_creds = Mock()
    type(m_creds).access_key = 'ak'
    type(m_creds).secret_key = 'sk'
    type(m_creds).token = 'tkn'
    mock_session = Mock()
    m_sess = Mock()
    m_sess.get_credentials.return_value = m_creds
    type(mock_session)._session = m_sess
    with patch('%s._get_sts_token' % pb) as mock_get_sts:
        with patch('%s.logger' % pbm) as mock_logger:
            with patch('%s.boto3.Session' % pbm) as mock_sess:
                with patch.dict('%s._services' % pbm, {}, clear=True):
                    mock_sess.return_value = mock_session
                    cls = AwsLimitChecker(profile_name='myprof')
                    # construction itself logs and builds sessions; reset
                    # so the assertions cover only the property access
                    mock_get_sts.reset_mock()
                    mock_logger.reset_mock()
                    mock_sess.reset_mock()
                    res = cls._boto_conn_kwargs
    assert mock_get_sts.mock_calls == []
    assert mock_logger.mock_calls == [
        call.debug('Using credentials profile: %s', 'myprof')
    ]
    assert mock_sess.mock_calls == [call(profile_name='myprof')]
    assert res == {
        'region_name': None,
        'aws_access_key_id': 'ak',
        'aws_secret_access_key': 'sk',
        'aws_session_token': 'tkn'
    }
def test_validate_fail(self):
    """
    TerraformRunner.__init__ runs 'terraform validate'; when validation
    fails the constructor must raise, and log a critical message pointing
    users at the bug tracker.

    Fix: removed a leftover debugging ``print(args)`` from the
    side-effect helper, which polluted captured test output.
    """

    # side effect: 'version' succeeds, 'validate' blows up
    def se_run(*args, **kwargs):
        if args[0] == 'version':
            return "Terraform v1.2.3\nfoo\n"
        if args[0] == 'validate':
            raise Exception()

    # validate is called in __init__; we can't easily patch and re-call
    with patch('%s._run_tf' % pb) as mock_run:
        mock_run.side_effect = se_run
        with patch('%s.logger' % pbm, autospec=True) as mock_logger:
            with pytest.raises(Exception) as excinfo:
                TerraformRunner(self.mock_config(), 'terraform-bin')
    assert exc_msg(excinfo.value) == 'ERROR: Terraform config validation ' \
                                     'failed.'
    assert mock_run.mock_calls == [
        call('version'),
        call('validate', ['.'])
    ]
    assert mock_logger.mock_calls == [
        call.debug('Terraform version: %s', (1, 2, 3)),
        call.critical("Terraform config validation failed. This is almost "
                      "certainly a bug in webhook2lambda2sqs; please "
                      "re-run with '-vv' and open a bug at <https://"
                      "github.com/jantman/webhook2lambda2sqs/issues>")
    ]
def test_update_limits_from_api_low_max_instances(self):
    """
    _update_limits_from_api() should leave the ENI limit at its default
    (api_limit unset) when DescribeAccountAttributes reports a low
    max-instances value.
    """
    fix = result_fixtures.VPC()
    attr_response = fix.test_update_limits_from_api_low_max_instances
    res_conn = Mock()
    client = Mock()
    client.describe_account_attributes.return_value = attr_response
    svc = _VpcService(21, 43)
    svc.resource_conn = res_conn
    svc.conn = client
    with patch('awslimitchecker.services.vpc.logger') as log_mock:
        svc._update_limits_from_api()
    # only the client connection is used, and only for account attributes
    assert res_conn.mock_calls == []
    assert client.mock_calls == [
        call.describe_account_attributes()
    ]
    assert log_mock.mock_calls == [
        call.info("Querying EC2 DescribeAccountAttributes for limits"),
        call.debug('Done setting limits from API')
    ]
    eni_limit = svc.limits['Network interfaces per Region']
    assert eni_limit.api_limit is None
    assert eni_limit.get_limit() == DEFAULT_ENI_LIMIT
def test_find_usage_snapshots(self):
    """
    _find_usage_snapshots() (paginate_dict path) should count all
    snapshots owned by 'self' into a single 'Active snapshots' usage
    entry, querying describe_snapshots via the paginate_dict helper with
    NextToken marker handling.

    NOTE(review): an earlier test in this chunk has this exact name
    against the boto2/boto_query_wrapper code path; if both live in one
    class, this definition shadows the earlier one.
    """
    response = result_fixtures.EBS.test_find_usage_snapshots
    mock_conn = Mock()
    cls = _EbsService(21, 43)
    cls.conn = mock_conn
    with patch('awslimitchecker.services.ebs.logger') as mock_logger:
        with patch('%s.paginate_dict' % self.pbm) as mock_paginate:
            mock_paginate.return_value = response
            cls._find_usage_snapshots()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EBS snapshots"),
    ]
    assert len(cls.limits['Active snapshots'].get_current_usage()) == 1
    # the trailing '' is an implicit-concatenation artifact of line
    # wrapping; the key is simply 'Active snapshots'
    assert cls.limits['Active snapshots'
                      ''].get_current_usage()[0].get_value() == 3
    # the connection is never called directly; everything goes through
    # paginate_dict
    assert mock_conn.mock_calls == []
    assert mock_paginate.mock_calls == [
        call(
            mock_conn.describe_snapshots,
            OwnerIds=['self'],
            alc_marker_path=['NextToken'],
            alc_data_path=['Snapshots'],
            alc_marker_param='NextToken'
        )
    ]
def test_boto3_connection_kwargs_sts_again_other_account(self):
    """
    When cached STS credentials exist but belong to a different account,
    _boto3_connection_kwargs should log that fact, fetch fresh
    credentials via _get_sts_token(), and return them along with the
    configured region.
    """
    cls = ConnectableTester(account_id='123', account_role='myrole',
                            region='myregion')
    mock_creds = Mock()
    type(mock_creds).access_key = 'sts_ak'
    type(mock_creds).secret_key = 'sts_sk'
    type(mock_creds).session_token = 'sts_token'
    # cached credentials are for account 456, not the requested 123
    type(mock_creds).account_id = '456'
    with patch('%s._get_sts_token' % pb) as mock_get_sts:
        with patch('%s.logger' % pbm) as mock_logger:
            with patch('%s.boto3.Session' % pbm) as mock_sess:
                mock_get_sts.return_value = mock_creds
                Connectable.credentials = mock_creds
                res = cls._boto3_connection_kwargs
    # fresh STS token fetched exactly once
    assert mock_get_sts.mock_calls == [call()]
    assert mock_logger.mock_calls == [
        call.debug("Previous STS credentials are for account %s; "
                   "getting new credentials for current account "
                   "(%s)", '456', '123')
    ]
    # no plain Session is built on the STS path
    assert mock_sess.mock_calls == []
    assert res == {
        'region_name': 'myregion',
        'aws_access_key_id': 'sts_ak',
        'aws_secret_access_key': 'sts_sk',
        'aws_session_token': 'sts_token'
    }
def test_set_account_info_env(self):
    """
    _set_account_info() should use the AWS_REGION environment variable
    when creating clients, parse the account ID out of the IAM user ARN,
    and take the final region from the client's _client_config.
    """
    self.cls.aws_account_id = None
    self.cls.aws_region = None
    with patch('%s.logger' % pbm, autospec=True) as mock_logger:
        with patch('%s.client' % pbm, autospec=True) as mock_client:
            mock_client.return_value.get_user.return_value = {
                'User': {'Arn': 'arn:aws:iam::123456789:user/foo'}
            }
            type(mock_client.return_value)._client_config = Mock(
                region_name='myregion')
            # only AWS_REGION present in the environment
            with patch.dict(
                    '%s.os.environ' % pbm,
                    {'AWS_REGION': 'ar'}, clear=True):
                self.cls._set_account_info()
    # account ID parsed from the ARN; region taken from client config
    assert self.cls.aws_account_id == '123456789'
    assert self.cls.aws_region == 'myregion'
    assert mock_client.mock_calls == [
        call('iam', region_name='ar'),
        call().get_user(),
        call('lambda', region_name='ar')
    ]
    assert mock_logger.mock_calls == [
        call.debug('Connecting to IAM with region_name=%s', 'ar'),
        call.info('Found AWS account ID as %s; region: %s',
                  '123456789', 'myregion')
    ]
def test_find_usage_networking_eni_sg(self):
    """
    _find_usage_networking_eni_sg() should record one usage entry per
    network interface for the 'VPC security groups per elastic network
    interface' limit, valued at each interface's security-group count.

    NOTE(review): this re-defines a method name used earlier in this
    chunk with an identical body; if both live in one class, this
    definition shadows the earlier one — the duplicate should be removed.
    """
    mocks = fixtures.test_find_usage_networking_eni_sg
    mock_conn = Mock()
    mock_conn.network_interfaces.all.return_value = mocks
    cls = _Ec2Service(21, 43)
    cls.resource_conn = mock_conn
    with patch('awslimitchecker.services.ec2.logger') as mock_logger:
        cls._find_usage_networking_eni_sg()
    assert mock_logger.mock_calls == [
        call.debug("Getting usage for EC2 Network Interfaces"),
    ]
    limit = cls.limits['VPC security groups per elastic network interface']
    # sort for deterministic ordering of the per-interface entries
    sorted_usage = sorted(limit.get_current_usage())
    assert len(sorted_usage) == 3
    assert sorted_usage[0].limit == limit
    assert sorted_usage[0].resource_id == 'if-1'
    assert sorted_usage[0].get_value() == 0
    assert sorted_usage[1].limit == limit
    assert sorted_usage[1].resource_id == 'if-2'
    assert sorted_usage[1].get_value() == 3
    assert sorted_usage[2].limit == limit
    assert sorted_usage[2].resource_id == 'if-3'
    assert sorted_usage[2].get_value() == 8
    assert mock_conn.mock_calls == [
        call.network_interfaces.all()
    ]
def test_find_usage_other_client_error(self):
    """
    find_usage() should re-raise a ClientError whose error code is not
    one it specifically handles (here 'NotAccessDenied'), after logging
    only the initial debug line and without marking usage as found.
    """
    resp = {
        'ResponseMetadata': {
            'HTTPStatusCode': 400,
            'RequestId': '7d74c6f0-c789-11e5-82fe-a96cdaa6d564'
        },
        'Error': {
            'Message': 'Not Unknown',
            'Code': 'NotAccessDenied',
            'Type': 'Sender'
        }
    }
    ce = ClientError(resp, 'operation')

    # side effect: simulate the API call failing with the unhandled error
    def se_get():
        raise ce

    mock_conn = Mock()
    mock_conn.get_send_quota.side_effect = se_get
    with patch('%s.connect' % pb) as mock_connect:
        with patch('%s.logger' % pbm) as mock_logger:
            cls = _SesService(21, 43, {}, None)
            cls.conn = mock_conn
            assert cls._have_usage is False
            # the unhandled error must propagate to the caller
            with pytest.raises(ClientError):
                cls.find_usage()
    assert mock_connect.mock_calls == [call()]
    assert cls._have_usage is False
    assert mock_logger.mock_calls == [
        call.debug('Checking usage for service %s', 'SES')
    ]
    assert mock_conn.mock_calls == [call.get_send_quota()]
    assert len(cls.limits['Daily sending quota'].get_current_usage()) == 0
def test_get_limit_check_id_subscription_required(self):
    """
    When describe_trusted_advisor_checks raises
    SubscriptionRequiredException, _get_limit_check_id() should flip
    have_ta to False, log a warning, and return (None, None) rather than
    raising.
    """
    # side effect: simulate the premium-support subscription error
    def se_api(language=None):
        response = {
            'ResponseMetadata': {
                'HTTPStatusCode': 400,
                'RequestId': '3cc9b2a8-c6e5-11e5-bc1d-b13dcea36176'
            },
            'Error': {
                'Message': 'AWS Premium Support Subscription is required '
                           'to use this service.',
                'Code': 'SubscriptionRequiredException'
            }
        }
        raise ClientError(response, 'operation')

    assert self.cls.have_ta is True
    self.mock_conn.describe_trusted_advisor_checks.side_effect = se_api
    with patch('awslimitchecker.trustedadvisor'
               '.logger', autospec=True) as mock_logger:
        res = self.cls._get_limit_check_id()
    # the error disables Trusted Advisor for the rest of the run
    assert self.cls.have_ta is False
    assert res == (None, None)
    assert self.mock_conn.mock_calls == [
        call.describe_trusted_advisor_checks(language='en')
    ]
    assert mock_logger.mock_calls == [
        call.debug("Querying Trusted Advisor checks"),
        call.warning("Cannot check TrustedAdvisor: %s",
                     'AWS Premium Support Subscription is required to '
                     'use this service.')
    ]
def test_clone_repo_dry_run(self):
    """
    clone_repo() in dry-run mode must log what it WOULD do but never
    call Repo.clone_from, returning the target path plus a dry-run
    marker string.
    """
    type(self.bi).ssh_clone_url = 'ssh_url'
    type(self.bi).https_clone_url = 'https_url'
    self.cls.dry_run = True
    boom = Exception('foo')

    # would only matter if clone_from were (incorrectly) invoked
    def fake_clone(url, path, branch=None):
        if url == 'ssh_url':
            raise boom
        return True

    with patch('%s.path_for_repo' % pb) as path_mock, \
            patch('%s.Repo' % pbm) as repo_mock, \
            patch('%s.logger' % pbm) as log_mock:
        path_mock.return_value = '/repo/path'
        repo_mock.clone_from.side_effect = fake_clone
        res = self.cls.clone_repo()
    assert path_mock.mock_calls == [call()]
    # dry-run: no git operations at all
    assert repo_mock.mock_calls == []
    assert log_mock.mock_calls == [
        call.debug("Cloning %s branch %s into: %s", 'my/repo', 'master',
                   '/repo/path'),
        call.info("DRY RUN - not actually cloning %s into %s", 'my/repo',
                  '/repo/path')
    ]
    assert res == ('/repo/path', '(DRY RUN)')