def setUp(self):
    """Wire up a ShowCommand with fully mocked collaborators."""
    self.session = mock.Mock(Session)
    self.output_stream_factory = mock.Mock(OutputStreamFactory)
    # MagicMock (not Mock) is required here because the output stream is
    # used as a context manager; a plain Mock raises AttributeError for
    # __enter__/__exit__.
    stream_ctx = mock.MagicMock()
    self.output_stream = mock.Mock()
    stream_ctx.__enter__.return_value = self.output_stream
    self.output_stream_factory.get_output_stream.return_value = stream_ctx
    self.db_reader = mock.Mock(DatabaseRecordReader)
    self.db_reader.iter_latest_records.return_value = []
    self.db_reader.iter_records.return_value = []
    self.show_cmd = ShowCommand(
        self.session, self.db_reader, self.output_stream_factory)
    self.formatter = mock.Mock(Formatter)
    self.add_formatter('mock', self.formatter)
    # Parsed CLI arguments that route output through the mock formatter.
    self.parsed_args = argparse.Namespace(
        format='mock', include=None, exclude=None)
    self.parsed_globals = argparse.Namespace(color='auto')
    self.files = FileCreator()
def test_can_run_apicall_step_with_query(self):
    """An apicall step with a query stores only the queried projection."""
    loaded = load_wizard("""
        plan:
          start:
            values:
              foo:
                type: apicall
                operation: iam.ListPolicies
                params:
                  Scope: AWS
                query: Policies[].Name
    """)
    fake_session = mock.Mock(spec=Session)
    fake_client = mock.Mock()
    fake_session.create_client.return_value = fake_client
    fake_client.list_policies.return_value = {
        'Policies': [{'Name': 'one'}, {'Name': 'two'}],
    }
    planner = core.Planner(
        step_handlers={
            'apicall': core.APICallStep(
                api_invoker=core.APIInvoker(session=fake_session)),
        },
    )
    parameters = planner.plan(loaded['plan'])
    # The stored value is the result of applying the Policies[].Name
    # jmespath query to the API response.
    self.assertEqual(parameters['foo'], ['one', 'two'])
def test_invoke(self):
    """invoke() should build a client, fetch the waiter, and wait."""
    waiter = mock.Mock()
    session = mock.Mock()
    session.create_client.return_value.get_waiter.return_value = waiter
    parsed_globals = mock.Mock()
    parsed_globals.region = 'us-east-1'
    parsed_globals.endpoint_url = 'myurl'
    parsed_globals.verify_ssl = True
    waiter_name = 'my_waiter'
    call_params = {'Foo': 'bar', 'Baz': 'biz'}
    caller = WaiterCaller(session, waiter_name)
    caller.invoke('myservice', 'MyWaiter', call_params, parsed_globals)
    # The client must be created with the parsed global settings.
    session.create_client.assert_called_with(
        'myservice',
        region_name=parsed_globals.region,
        endpoint_url=parsed_globals.endpoint_url,
        verify=parsed_globals.verify_ssl)
    # The named waiter must be fetched from that client.
    session.create_client.return_value.get_waiter.assert_called_with(
        waiter_name)
    # And the wait call must receive the API parameters as kwargs.
    waiter.wait.assert_called_with(Foo='bar', Baz='biz')
def test_create_and_wait_for_changeset_error_waiting_for_changeset(self):
    """An error raised while waiting for the changeset must propagate.

    ``create_changeset`` succeeds, but ``wait_for_changeset`` raises;
    ``create_and_wait_for_changeset`` should let that error bubble up.
    """
    stack_name = "stack_name"
    template = "template"
    parameters = [{
        "ParameterKey": "Key1",
        "ParameterValue": "Value",
        "UsePreviousValue": True
    }]
    capabilities = ["capabilities"]
    changeset_id = "changeset id"
    changeset_type = "changeset type"
    role_arn = "arn:aws:iam::1234567890:role"
    notification_arns = ["arn:aws:sns:region:1234567890:notify"]
    s3_uploader = None
    tags = [{"Key": "key1", "Value": "val1"}]
    self.deployer.create_changeset = mock.Mock()
    self.deployer.create_changeset.return_value = ChangeSetResult(
        changeset_id, changeset_type)
    self.deployer.wait_for_changeset = mock.Mock()
    self.deployer.wait_for_changeset.side_effect = RuntimeError
    with self.assertRaises(RuntimeError):
        # Fix: dropped the unused ``result =`` binding -- the call is
        # expected to raise, so its return value can never be observed.
        self.deployer.create_and_wait_for_changeset(
            stack_name, template, parameters, capabilities, role_arn,
            notification_arns, s3_uploader, tags)
def test_wait_for_execute_no_changes(self):
    """A WaiterError during execute-wait becomes DeployFailedError."""
    stack_name = "stack_name"
    changeset_type = "CREATE"
    cfn_client = mock.Mock()
    deployer = Deployer(cfn_client)
    waiter = mock.Mock()
    cfn_client.get_waiter.return_value = waiter
    waiter.wait.side_effect = botocore.exceptions.WaiterError(
        name="name", reason="reason", last_response={})
    with self.assertRaises(exceptions.DeployFailedError):
        deployer.wait_for_execute(stack_name, changeset_type)
    # The deployer should poll with a 30s delay for up to 120 attempts,
    # using the stack-create waiter since the changeset type is CREATE.
    waiter.wait.assert_called_once_with(
        StackName=stack_name,
        WaiterConfig={'Delay': 30, 'MaxAttempts': 120})
    cfn_client.get_waiter.assert_called_once_with("stack_create_complete")
def test_invokes_digest_validator(self):
    """Traversal should hand each fetched digest to the validator."""
    start_date = START_DATE
    end_date = END_DATE
    key_name = end_date.strftime(DATE_FORMAT) + '.json.gz'
    digest = {
        'digestPublicKeyFingerprint': 'a',
        'digestS3Bucket': '1',
        'digestS3Object': key_name,
        'previousDigestSignature': '...',
        'digestStartTime': (
            (end_date - timedelta(hours=1)).strftime(DATE_FORMAT)),
        'digestEndTime': end_date.strftime(DATE_FORMAT),
    }
    provider = mock.Mock()
    provider.load_digest_keys_in_range.return_value = [key_name]
    provider.fetch_digest.return_value = (digest, key_name)
    key_provider = mock.Mock()
    public_keys = {'a': {'Fingerprint': 'a', 'Value': 'a'}}
    key_provider.get_public_keys.return_value = public_keys
    validator = mock.Mock()
    traverser = DigestTraverser(
        digest_provider=provider, starting_bucket='1',
        starting_prefix='baz', public_key_provider=key_provider,
        digest_validator=validator)
    digest_iter = traverser.traverse(start_date, end_date)
    # The digest is yielded, and the validator receives it along with
    # the matching public key value.
    self.assertEqual(digest, next(digest_iter))
    validator.validate.assert_called_with(
        '1', key_name, public_keys['a']['Value'], digest, key_name)
def test_generate_json_skeleton_with_timestamp(self):
    """Timestamp members should render as an ISO-8601 epoch string."""
    parsed_args = mock.Mock()
    parsed_args.generate_cli_skeleton = 'input'
    # One structure with a single timestamp member.
    shape = DenormalizedStructureBuilder().with_members({
        'A': {
            'type': 'structure',
            'members': {
                'B': {'type': 'timestamp'},
            },
        },
    }).build_model()
    operation_model = mock.Mock(input_shape=shape)
    argument = GenerateCliSkeletonArgument(self.session, operation_model)
    with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
        rc = argument.generate_json_skeleton(
            call_parameters=None, parsed_args=parsed_args,
            parsed_globals=None)
    expected = (
        '{\n'
        '    "A": {\n'
        '        "B": "1970-01-01T00:00:00"\n'
        '    }\n'
        '}\n'
    )
    self.assertEqual(expected, mock_stdout.getvalue())
    self.assertEqual(rc, 0)
def setUp(self):
    """Create a mock client whose events flow through a real emitter."""
    self.emitter = HierarchicalEmitter()
    self.client = mock.Mock()
    self.client.meta.events = self.emitter
    # Freeze "now" so the tests see a deterministic sentinel timestamp.
    self.date_parser = mock.Mock(return_value=mock.sentinel.now)
    self.responses = []
def setUp(self):
    """Build a CodeDeployer whose waiter immediately succeeds."""
    succeeded_waiter = mock.Mock()
    succeeded_waiter.wait.return_value = {}
    codedeploy_client = mock.Mock()
    codedeploy_client.get_waiter.return_value = succeeded_waiter
    self.deployer = CodeDeployer(codedeploy_client, self.TEST_APPSPEC)
def setUp(self):
    """Set up a DeployCommand plus a Deployer with stubbed lifecycle."""
    self.session = mock.Mock()
    self.session.get_scoped_config.return_value = {}
    # Canned CLI arguments for the deploy command.
    self.parsed_args = FakeArgs(
        template_file='./foo',
        stack_name="some_stack_name",
        parameter_overrides=["Key1=Value1", "Key2=Value2"],
        no_execute_changeset=False,
        execute_changeset=True,
        disable_rollback=True,
        capabilities=None,
        role_arn=None,
        notification_arns=[],
        fail_on_empty_changeset=True,
        s3_bucket=None,
        s3_prefix="some prefix",
        kms_key_id="some kms key id",
        force_upload=True,
        tags=["tagkey1=tagvalue1"],
    )
    self.parsed_globals = FakeArgs(
        region="us-east-1", endpoint_url=None, verify_ssl=None)
    self.deploy_command = DeployCommand(self.session)
    # All changeset lifecycle methods are stubbed out on the deployer.
    self.deployer = Deployer(mock.Mock())
    self.deployer.create_and_wait_for_changeset = mock.Mock()
    self.deployer.execute_changeset = mock.Mock()
    self.deployer.wait_for_execute = mock.Mock()
def setUp(self):
    """Create a Comparator wired to three mock sync strategies."""
    self.sync_strategy = mock.Mock()
    self.not_at_dest_sync_strategy = mock.Mock()
    self.not_at_src_sync_strategy = mock.Mock()
    self.comparator = Comparator(
        self.sync_strategy,
        self.not_at_dest_sync_strategy,
        self.not_at_src_sync_strategy,
    )
def test_subscribe_when_bucket_needs_to_be_created(self):
    """A 404 on HeadBucket should trigger a CreateBucket (PUT)."""
    # TODO: fix this patch when we have a better way to stub out responses
    with mock.patch('botocore.endpoint.Endpoint._send') as send_patch:
        def make_response(status_code):
            # Minimal HTTP response stub with an empty body/headers.
            response = mock.Mock()
            response.status_code = status_code
            response.content = b''
            response.headers = {}
            return response
        send_patch.side_effect = [
            make_response(404),  # HeadBucket: bucket does not exist
            make_response(200),  # CreateBucket: succeeds
        ]
        s3_client = self.driver.session.create_client('s3')
        S3BucketHelper(s3_client).prepare_bucket('mybucket')
        calls = send_patch.call_args_list
        self.assertEqual(calls[0][0][0].method, 'HEAD')
        # Since the HeadBucket failed with a 404, a CreateBucket which
        # is a PUT request should be made next.
        self.assertEqual(calls[1][0][0].method, 'PUT')
def test_choose_sync_strategy_overwrite(self):
    """Plugin-provided strategies should replace the defaults."""
    session = mock.Mock()
    cmd_arc = CommandArchitecture(session, 'sync', {
        'region': 'us-east-1',
        'endpoint_url': None,
        'verify_ssl': None,
    })
    # Each mock strategy advertises the sync type it handles.
    at_both = mock.Mock()
    at_both.sync_type = 'file_at_src_and_dest'
    not_at_dest = mock.Mock()
    not_at_dest.sync_type = 'file_not_at_dest'
    not_at_src = mock.Mock()
    not_at_src.sync_type = 'file_not_at_src'
    session.emit.return_value = [
        (None, at_both),
        (None, not_at_dest),
        (None, not_at_src),
    ]
    strategies = cmd_arc.choose_sync_strategies()
    self.assertEqual(
        strategies['file_at_src_and_dest_sync_strategy'], at_both)
    self.assertEqual(
        strategies['file_not_at_dest_sync_strategy'], not_at_dest)
    self.assertEqual(
        strategies['file_not_at_src_sync_strategy'], not_at_src)
def test_main(self, mock_yaml_dump):
    """_run_main should export then write output, for YAML and JSON."""
    self.package_command.write_output = mock.Mock()
    self.package_command._export = mock.Mock()
    mock_yaml_dump.return_value = "hello"
    # Use a temporary file as the template and run once per format.
    with tempfile.NamedTemporaryFile() as handle:
        filename = handle.name
        for use_json in (False, True):
            self.parsed_args.template_file = filename
            self.parsed_args.use_json = use_json
            rc = self.package_command._run_main(
                self.parsed_args, self.parsed_globals)
            self.assertEqual(rc, 0)
            self.package_command._export.assert_called_once_with(
                filename, use_json)
            self.package_command.write_output.assert_called_once_with(
                self.parsed_args.output_template_file, mock.ANY)
            # Clear call history so the next iteration can assert
            # called-once again.
            self.package_command._export.reset_mock()
            self.package_command.write_output.reset_mock()
def test_write(self):
    """Writing a node should propagate the write to every child."""
    for _ in range(2):
        self.node.add_child(mock.Mock())
    self.node.write(mock.Mock())
    for child in self.node.children:
        self.assertTrue(child.write.called)
def setUp(self):
    """Build a TableNameCompleter over a minimal ddb completion index."""
    # Index models ``aws ddb put|select``; each subcommand takes a
    # table_name string argument.
    index_data = {
        'command_names': {
            '': ['aws'],
            'aws': ['ddb'],
            'aws.ddb': ['put', 'select'],
        },
        'arg_names': {
            '': {},
            'aws.ddb': {
                'put': ['table_name'],
                'select': ['table_name'],
            },
        },
        'arg_data': {
            '': {},
            'aws.ddb': {
                'put': {
                    'table_name': (
                        'table_name', 'string', 'put', 'aws.ddb.',
                        None, True),
                },
                'select': {
                    'table_name': (
                        'table_name', 'string', 'select', 'aws.ddb.',
                        None, True),
                },
            },
        },
    }
    self.index = InMemoryIndex(index_data)
    self.parser = parser.CLIParser(self.index)
    self.mock_client = mock.Mock()
    self.mock_create_client = mock.Mock()
    self.mock_create_client.create_client.return_value = self.mock_client
    self.completer = TableNameCompleter(self.mock_create_client)
def test_creates_traverser_and_gets_organization_id(self):
    """For an organization trail, the org id is resolved and stored."""
    cloudtrail_client = mock.Mock()
    cloudtrail_client.describe_trails.return_value = {
        'trailList': [{
            'TrailARN': TEST_TRAIL_ARN,
            'S3BucketName': 'bucket',
            'S3KeyPrefix': 'prefix',
            'IsOrganizationTrail': True,
        }],
    }
    organization_client = mock.Mock()
    organization_client.describe_organization.return_value = {
        'Organization': {
            'MasterAccountId': TEST_ACCOUNT_ID,
            'Id': TEST_ORGANIZATION_ID,
        },
    }
    traverser = create_digest_traverser(
        trail_arn=TEST_TRAIL_ARN,
        trail_source_region='us-east-1',
        cloudtrail_client=cloudtrail_client,
        organization_client=organization_client,
        s3_client_provider=mock.Mock(),
        account_id=TEST_ACCOUNT_ID)
    self.assertEqual('bucket', traverser.starting_bucket)
    self.assertEqual('prefix', traverser.starting_prefix)
    provider = traverser.digest_provider
    self.assertEqual('us-east-1', provider.trail_home_region)
    self.assertEqual('foo', provider.trail_name)
    self.assertEqual(TEST_ORGANIZATION_ID, provider.organization_id)
def setUp(self):
    """Stub a session whose credential chain resolves to an SSO provider."""
    self.session = mock.Mock(spec=botocore.session.Session)
    self.mock_sso_provider = mock.Mock(
        spec=botocore.credentials.SSOProvider)
    self.mock_resolver = mock.Mock(
        botocore.credentials.CredentialResolver)
    self.mock_resolver.get_provider.return_value = self.mock_sso_provider
    # Any component lookup on the session returns the resolver.
    self.session.get_component.return_value = self.mock_resolver
def test_ensures_digest_from_same_location_as_json_contents(self):
    """A digest whose claimed S3 location mismatches its source is invalid."""
    start_date = START_DATE
    end_date = END_DATE
    callback, collected = collecting_callback()
    key_name = end_date.strftime(DATE_FORMAT) + '.json.gz'
    # digestS3Bucket deliberately disagrees with the bucket the digest
    # was actually fetched from ('1').
    digest = {
        'digestPublicKeyFingerprint': 'a',
        'digestS3Bucket': 'not_same',
        'digestS3Object': key_name,
        'digestEndTime': end_date.strftime(DATE_FORMAT),
    }
    provider = mock.Mock()
    provider.load_digest_keys_in_range.return_value = [key_name]
    provider.fetch_digest.return_value = (digest, key_name)
    traverser = DigestTraverser(
        digest_provider=provider, starting_bucket='1',
        starting_prefix='baz', public_key_provider=mock.Mock(),
        digest_validator=mock.Mock(), on_invalid=callback)
    digest_iter = traverser.traverse(start_date, end_date)
    # Nothing valid is yielded; the invalid digest is reported via the
    # on_invalid callback instead.
    self.assertIsNone(next(digest_iter, None))
    self.assertEqual(1, len(collected))
    self.assertEqual(
        'Digest file\ts3://1/%s\tINVALID: invalid format' % key_name,
        collected[0]['message'])
def setUp(self):
    """Configure a GenerateProvisioningArtifactCommand with mock clients."""
    self.session = mock.Mock()
    self.servicecatalog_client = mock.Mock()
    self.s3_client = mock.Mock()
    # create_client is invoked twice: first for S3, then Service Catalog.
    self.session.create_client.side_effect = [
        self.s3_client, self.servicecatalog_client]
    self.session.get_available_regions.return_value = [
        'us-east-1', 'eu-west-1']
    self.cmd = GenerateProvisioningArtifactCommand(self.session)
    self.args = Namespace(
        file_path='foo-file-path',
        bucket_name='foo-bucket-name',
        provisioning_artifact_name='foo-pa-name',
        provisioning_artifact_description='foo-pa-desc',
        provisioning_artifact_type='CLOUD_FORMATION_TEMPLATE',
        product_id='prod-1234567890abc',
    )
    self.s3_url = "https://s3.amazonaws.com/foo-bucket-name/foo-file-path"
    # Global CLI arguments.
    self.global_args = Namespace(
        region='us-east-1', endpoint_url=None, verify_ssl=None)
def setUp(self):
    """Build a GenerateCliSkeletonArgument over a one-member input shape."""
    self.session = mock.Mock()
    # Arbitrary mocked service operation object.
    self.service_operation = mock.Mock()
    # Minimal input model: one structure with a single string member.
    self.input_shape = {
        'A': {
            'type': 'structure',
            'members': {
                'B': {'type': 'string'},
            },
        },
    }
    shape = DenormalizedStructureBuilder().with_members(
        self.input_shape).build_model()
    self.operation_model = mock.Mock(input_shape=shape)
    self.argument = GenerateCliSkeletonArgument(
        self.session, self.operation_model)
    # The JSON skeleton expected on standard output after generation.
    self.ref_json_output = '{\n    "A": {\n        "B": ""\n    }\n}\n'
def setUp(self):
    """Prepare expected kubeconfig entries and a mocked EKS client."""
    response = describe_cluster_response()
    # Expected kubeconfig cluster entry derived from the canned response.
    self._correct_cluster_entry = OrderedDict([
        ("cluster", OrderedDict([
            ("certificate-authority-data",
             response["cluster"]["certificateAuthority"]["data"]),
            ("server", response["cluster"]["endpoint"]),
        ])),
        ("name", response["cluster"]["arn"]),
    ])
    # Expected kubeconfig user entry using the aws CLI exec plugin.
    self._correct_user_entry = OrderedDict([
        ("name", response["cluster"]["arn"]),
        ("user", OrderedDict([
            ("exec", OrderedDict([
                ("apiVersion", API_VERSION),
                ("args", [
                    "--region",
                    "region",
                    "eks",
                    "get-token",
                    "--cluster-name",
                    "ExampleCluster",
                ]),
                ("command", "aws"),
            ])),
        ])),
    ])
    self._mock_client = mock.Mock()
    self._mock_client.describe_cluster.return_value = response
    self._session = mock.Mock(spec=botocore.session.Session)
    self._session.create_client.return_value = self._mock_client
    self._session.profile = None
    self._client = EKSClient(self._session, "ExampleCluster", None)
def test_wait_for_changeset_failed_to_create_changeset(self):
    """A FAILED changeset status surfaces as a RuntimeError."""
    stack_name = "stack_name"
    changeset_id = "changeset-id"
    cfn_client = mock.Mock()
    deployer = Deployer(cfn_client)
    waiter = mock.Mock()
    cfn_client.get_waiter.return_value = waiter
    waiter.wait.side_effect = botocore.exceptions.WaiterError(
        name="name", reason="reason",
        last_response={"Status": "FAILED", "StatusReason": "some reason"})
    with self.assertRaises(RuntimeError):
        deployer.wait_for_changeset(changeset_id, stack_name)
    # The changeset-create waiter is polled every 5 seconds.
    waiter.wait.assert_called_once_with(
        ChangeSetName=changeset_id, StackName=stack_name,
        WaiterConfig={'Delay': 5})
    cfn_client.get_waiter.assert_called_once_with(
        "change_set_create_complete")
def test_fails_when_digest_metadata_is_missing(self):
    """A digest object with empty S3 metadata fails signature checks."""
    key = MockDigestProvider([]).get_key_at_position(1)
    # Canned responses: bucket location, object listing, then the
    # gzipped digest body with no Metadata attached.
    self.parsed_responses = [
        {'LocationConstraint': ''},
        {'Contents': [{'Key': key}]},
        {
            'Body': six.BytesIO(_gz_compress(self._logs[0]['_raw_value'])),
            'Metadata': {},
        },
    ]
    s3_client_provider = S3ClientProvider(self.driver.session, 'us-east-1')
    digest_provider = DigestProvider(
        s3_client_provider, TEST_ACCOUNT_ID, 'foo', 'us-east-1')
    key_provider = mock.Mock()
    key_provider.get_public_keys.return_value = {
        'a': {'Value': VALID_TEST_KEY}}
    _setup_mock_traverser(
        self._mock_traverser, key_provider, digest_provider, mock.Mock())
    stdout, stderr, rc = self.run_cmd(
        ("cloudtrail validate-logs --trail-arn %s --start-time %s "
         "--region us-east-1") % (TEST_TRAIL_ARN, START_TIME_ARG), 1)
    self.assertIn(
        'Digest file\ts3://1/%s\tINVALID: signature verification failed'
        % key, stderr)
def test_can_run_apicall_step(self):
    """An apicall step without a query stores the full API response."""
    loaded = load_wizard("""
        plan:
          start:
            values:
              foo:
                type: apicall
                operation: iam.ListPolicies
                params:
                  Scope: AWS
    """)
    fake_session = mock.Mock(spec=Session)
    fake_client = mock.Mock()
    fake_session.create_client.return_value = fake_client
    fake_client.list_policies.return_value = {
        'Policies': ['foo'],
    }
    planner = core.Planner(
        step_handlers={
            'apicall': core.APICallStep(
                api_invoker=core.APIInvoker(session=fake_session)),
        },
    )
    parameters = planner.plan(loaded['plan'])
    self.assertEqual(parameters['foo'], {'Policies': ['foo']})
def setUp(self):
    """Build a GroupNameCompleter over a minimal logs completion index."""
    # Index models ``aws logs tail`` taking a group_name string argument.
    index_data = {
        'command_names': {
            '': ['aws'],
            'aws': ['logs'],
            'aws.logs': ['tail'],
        },
        'arg_names': {
            '': {},
            'aws.logs': {
                'tail': ['group_name'],
            },
        },
        'arg_data': {
            '': {},
            'aws.logs': {
                'tail': {
                    'group_name': (
                        'group_name', 'string', 'tail', 'aws.logs.',
                        None, True),
                },
            },
        },
    }
    self.index = InMemoryIndex(index_data)
    self.parser = parser.CLIParser(self.index)
    self.mock_client = mock.Mock()
    self.mock_create_client = mock.Mock()
    self.mock_create_client.create_client.return_value = self.mock_client
    self.completer = GroupNameCompleter(self.mock_create_client)
def setUp(self):
    """Create a WaiterStateCommandBuilder over a two-waiter model."""
    self.session = mock.Mock()
    self.service_model = mock.Mock()
    # Both waiters share the same description, operation, and timing.
    waiter_config = {
        'description': 'My waiter description.',
        'delay': 1,
        'maxAttempts': 10,
        'operation': 'MyOperation',
    }
    self.model = WaiterModel({
        'version': 2,
        'waiters': {
            'InstanceRunning': dict(waiter_config),
            'BucketExists': dict(waiter_config),
        },
    })
    self.waiter_builder = WaiterStateCommandBuilder(
        self.session, self.model, self.service_model)
def setUp(self):
    """Extend the base fixture with args carrying no pagination overrides."""
    super(TestShouldEnablePagination, self).setUp()
    self.parsed_globals = mock.Mock()
    # No pagination overrides supplied on the command line.
    self.parsed_args = mock.Mock(
        starting_token=None, page_size=None, max_items=None)
def test_service_object_to_waiter_model(self):
    """The waiter model is looked up by service name and api version."""
    session = mock.Mock()
    service_model = mock.Mock()
    service_model.service_name = 'service'
    service_model.api_version = '2014-01-01'
    get_waiter_model_from_service_model(session, service_model)
    session.get_waiter_model.assert_called_with('service', '2014-01-01')
def test_file_exists(self):
    """file_exists(): True on head success, False on ClientError,
    and any other exception propagates unchanged."""
    key = "some/path"
    expected_params = {"Bucket": self.bucket_name, "Key": key}
    head_response = {
        "AcceptRanges": "bytes",
        "ContentType": "text/html",
        "LastModified": "Thu, 16 Apr 2015 18:19:14 GMT",
        "ContentLength": 77,
        "VersionId": "null",
        "ETag": "\"30a6ec7e1a9ad79c203d05a589c8b400\"",
        "Metadata": {},
    }
    # File exists: head_object succeeds.
    self.s3client_stub.add_response(
        "head_object", head_response, expected_params)
    with self.s3client_stub:
        self.assertTrue(self.s3uploader.file_exists(key))
    # File missing: head_object raises a ClientError.
    self.s3client_stub.add_client_error(
        'head_object', "ClientError", "some error")
    with self.s3client_stub:
        self.assertFalse(self.s3uploader.file_exists(key))
    # Any other unknown exception must propagate to the caller.
    s3mock = mock.Mock()
    s3mock.head_object = mock.Mock(side_effect=RuntimeError())
    uploader = S3Uploader(s3mock, self.bucket_name)
    with self.assertRaises(RuntimeError):
        uploader.file_exists(key)