    def test_create_resource_with_config_override_user_agent_extra(self):
        mock_bc_session = mock.Mock()
        loader = mock.Mock(spec=loaders.Loader)
        loader.determine_latest_version.return_value = '2014-11-02'
        loader.load_service_model.return_value = {
            'resources': [],
            'service': [],
        }
        mock_bc_session.get_component.return_value = loader
        session = Session(botocore_session=mock_bc_session)
        session.resource_factory.load_from_definition = mock.Mock()
        session.client = mock.Mock()

        config = Config(signature_version='v4', user_agent_extra='foo')
        session.resource('sqs', config=config)

        session.client.assert_called_with(
            'sqs',
            aws_secret_access_key=None,
            aws_access_key_id=None,
            endpoint_url=None,
            use_ssl=True,
            aws_session_token=None,
            verify=None,
            region_name=None,
            api_version='2014-11-02',
            config=mock.ANY,
        )
        client_config = session.client.call_args[1]['config']
        assert client_config.user_agent_extra == 'foo'
        assert client_config.signature_version == 'v4'

    def test_create_resource_latest_version(self):
        mock_bc_session = mock.Mock()
        loader = mock.Mock(spec=loaders.Loader)
        loader.determine_latest_version.return_value = '2014-11-02'
        loader.load_service_model.return_value = {
            'resources': [],
            'service': [],
        }
        mock_bc_session.get_component.return_value = loader
        session = Session(botocore_session=mock_bc_session)
        session.resource_factory.load_from_definition = mock.Mock()

        session.resource('sqs')

        loader.load_service_model.assert_called_with('sqs', 'resources-1', None)

    def test_bad_resource_name_with_no_client_has_simple_err_msg(self):
        mock_bc_session = mock.Mock()
        loader = mock.Mock(spec=loaders.Loader)
        loader.load_service_model.side_effect = UnknownServiceError(
            service_name='foo', known_service_names='asdf')
        mock_bc_session.get_component.return_value = loader
        loader.list_available_services.return_value = ['good-resource']
        mock_bc_session.get_available_services.return_value = ['good-client']

        session = Session(botocore_session=mock_bc_session)
        with self.assertRaises(ResourceNotExistsError) as e:
            session.resource('bad-client')
        err_msg = str(e.exception)
        # Shouldn't mention anything about clients because
        # 'bad-client' is not a valid ibm_boto3.client(...)
        self.assertNotIn('ibm_boto3.client', err_msg)

    def test_bad_resource_name(self):
        mock_bc_session = mock.Mock()
        loader = mock.Mock(spec=loaders.Loader)
        loader.load_service_model.side_effect = UnknownServiceError(
            service_name='foo', known_service_names='asdf')
        mock_bc_session.get_component.return_value = loader
        loader.list_available_services.return_value = ['good-resource']
        mock_bc_session.get_available_services.return_value = ['sqs']

        session = Session(botocore_session=mock_bc_session)
        with self.assertRaises(ResourceNotExistsError) as e:
            session.resource('sqs')
        err_msg = str(e.exception)
        # 1. Should say the resource does not exist.
        self.assertIn('resource does not exist', err_msg)
        self.assertIn('sqs', err_msg)
        # 2. Should list available resources you can choose.
        self.assertIn('good-resource', err_msg)
        # 3. Should list the client if one is available.
        self.assertIn('client', err_msg)


class TestCollection(unittest.TestCase):
    def setUp(self):
        self.session = Session(
            aws_access_key_id='dummy',
            aws_secret_access_key='dummy',
            region_name='us-east-1')
        # Pick an arbitrary resource.
        self.s3_resource = self.session.resource('s3')

    def test_can_use_collection_methods(self):
        self.assertIsInstance(
            self.s3_resource.buckets.all(), ResourceCollection)

    def test_can_chain_methods(self):
        self.assertIsInstance(
            self.s3_resource.buckets.all().page_size(5), ResourceCollection)


class BaseDocsTest(unittest.TestCase):
    def setUp(self):
        self.root_dir = tempfile.mkdtemp()
        self.version_dirs = os.path.join(
            self.root_dir, 'myservice', '2014-01-01')
        os.makedirs(self.version_dirs)
        self.model_file = os.path.join(self.version_dirs, 'service-2.json')
        self.waiter_model_file = os.path.join(
            self.version_dirs, 'waiters-2.json')
        self.paginator_model_file = os.path.join(
            self.version_dirs, 'paginators-1.json')
        self.resource_model_file = os.path.join(
            self.version_dirs, 'resources-1.json')
        self.example_model_file = os.path.join(
            self.version_dirs, 'examples-1.json')

        self.json_model = {}
        self.waiter_json_model = {}
        self.paginator_json_model = {}
        self.resource_json_model = {}
        self._setup_models()

        self.doc_name = 'MyDoc'
        self.doc_structure = DocumentStructure(self.doc_name)

        self.setup_client_and_resource()

    def tearDown(self):
        shutil.rmtree(self.root_dir)

    def setup_client_and_resource(self):
        self._write_models()
        self.loader = Loader(extra_search_paths=[self.root_dir])
        self.botocore_session = ibm_botocore.session.get_session()
        self.botocore_session.register_component('data_loader', self.loader)
        self.session = Session(
            botocore_session=self.botocore_session, region_name='us-east-1')
        self.client = self.session.client('myservice', 'us-east-1')
        self.resource = self.session.resource('myservice', 'us-east-1')

    def _setup_models(self):
        self.json_model = {
            'metadata': {
                'apiVersion': '2014-01-01',
                'endpointPrefix': 'myservice',
                'signatureVersion': 'v4',
                'serviceFullName': 'AWS MyService',
                'protocol': 'query',
                'serviceId': 'MyService',
            },
            'operations': {
                'SampleOperation': {
                    'name': 'SampleOperation',
                    'input': {'shape': 'SampleOperationInputOutput'},
                    'output': {'shape': 'SampleOperationInputOutput'},
                }
            },
            'shapes': {
                'SampleOperationInputOutput': {
                    'type': 'structure',
                    'members': OrderedDict([
                        ('Foo', {
                            'shape': 'String',
                            'documentation': 'Documents Foo'}),
                        ('Bar', {
                            'shape': 'String',
                            'documentation': 'Documents Bar'}),
                    ]),
                },
                'String': {'type': 'string'},
            },
        }

        self.example_json_model = {
            "version": 1,
            "examples": {
                "SampleOperation": [{
                    "id": "sample-id",
                    "title": "sample-title",
                    "description": "Sample Description.",
                    "input": OrderedDict([
                        ("Foo", "bar"),
                    ]),
                    "comments": {
                        "input": {"Foo": "biz"},
                    },
                }]
            }
        }

        self.waiter_json_model = {
            "version": 2,
            "waiters": {
                "SampleOperationComplete": {
                    "delay": 15,
                    "operation": "SampleOperation",
                    "maxAttempts": 40,
                    "acceptors": [
                        {"expected": "complete",
                         "matcher": "pathAll",
                         "state": "success",
                         "argument": "Biz"},
                        {"expected": "failed",
                         "matcher": "pathAny",
                         "state": "failure",
                         "argument": "Biz"},
                    ],
                }
            }
        }

        self.paginator_json_model = {
            "pagination": {
                "SampleOperation": {
                    "input_token": "NextResult",
                    "output_token": "NextResult",
                    "limit_key": "MaxResults",
                    "result_key": "Biz",
                }
            }
        }

        self.resource_json_model = {
            "service": {
                "actions": OrderedDict([
                    ("SampleOperation", {
                        "request": {"operation": "SampleOperation"}
                    }),
                    ("SampleListReturnOperation", {
                        "request": {"operation": "SampleOperation"},
                        "resource": {
                            "type": "Sample",
                            "identifiers": [{
                                "target": "Name",
                                "source": "response",
                                "path": "Samples[].Name",
                            }],
                            "path": "Samples[]",
                        },
                    }),
                ]),
                "has": {
                    "Sample": {
                        "resource": {
                            "type": "Sample",
                            "identifiers": [{
                                "target": "Name",
                                "source": "input",
                            }],
                        }
                    }
                },
                "hasMany": {
                    "Samples": {
                        "request": {"operation": "SampleOperation"},
                        "resource": {
                            "type": "Sample",
                            "identifiers": [{
                                "target": "Name",
                                "source": "response",
                                "path": "Samples[].Foo",
                            }],
                        },
                    }
                },
            },
            "resources": {
                "Sample": {
                    "identifiers": [{
                        "name": "Name",
                        "memberName": "Foo",
                    }],
                    "shape": "SampleOperationInputOutput",
                    "load": {
                        "request": {
                            "operation": "SampleOperation",
                            "params": [{
                                "target": "Foo",
                                "source": "identifier",
                                "name": "Name",
                            }],
                        }
                    },
                    "actions": {
                        "Operate": {
                            "request": {
                                "operation": "SampleOperation",
                                "params": [{
                                    "target": "Foo",
                                    "source": "identifier",
                                    "name": "Name",
                                }],
                            }
                        }
                    },
                    "batchActions": {
                        "Operate": {
                            "request": {
                                "operation": "SampleOperation",
                                "params": [{
                                    "target": "Samples[].Foo",
                                    "source": "identifier",
                                    "name": "Name",
                                }],
                            }
                        }
                    },
                    "has": {
                        "RelatedSample": {
                            "resource": {
                                "type": "Sample",
                                "identifiers": [{
                                    "target": "Name",
                                    "source": "data",
                                    "path": "Foo",
                                }],
                            }
                        }
                    },
                    "waiters": {
                        "Complete": {
                            "waiterName": "SampleOperationComplete",
                            "params": [{
                                "target": "Foo",
                                "source": "identifier",
                                "name": "Name",
                            }],
                        }
                    },
                }
            },
        }

    def _write_models(self):
        with open(self.resource_model_file, 'w') as f:
            json.dump(self.resource_json_model, f)
        with open(self.waiter_model_file, 'w') as f:
            json.dump(self.waiter_json_model, f)
        with open(self.paginator_model_file, 'w') as f:
            json.dump(self.paginator_json_model, f)
        with open(self.model_file, 'w') as f:
            json.dump(self.json_model, f)
        with open(self.example_model_file, 'w') as f:
            json.dump(self.example_json_model, f)

    def add_shape(self, shape):
        shape_name = list(shape.keys())[0]
        self.json_model['shapes'][shape_name] = shape[shape_name]

    def add_shape_to_params(self, param_name, shape_name, documentation=None,
                            is_required=False):
        params_shape = self.json_model['shapes']['SampleOperationInputOutput']
        member = {'shape': shape_name}
        if documentation is not None:
            member['documentation'] = documentation
        params_shape['members'][param_name] = member

        if is_required:
            required_list = params_shape.get('required', [])
            required_list.append(param_name)
            params_shape['required'] = required_list

    def assert_contains_lines_in_order(self, lines, contents=None):
        if contents is None:
            contents = self.doc_structure.flush_structure().decode('utf-8')
        for line in lines:
            self.assertIn(line, contents)
            beginning = contents.find(line)
            contents = contents[(beginning + len(line)):]

    def assert_not_contains_lines(self, lines):
        contents = self.doc_structure.flush_structure().decode('utf-8')
        for line in lines:
            self.assertNotIn(line, contents)


class CloudObjectStorage:
    def __init__(self, api_key=None, instance_id=None,
                 iam_endpoint=None, cos_endpoint=None):
        self.cos_endpoint = cos_endpoint
        self.session = Session(
            ibm_api_key_id=api_key,
            ibm_service_instance_id=instance_id,
            ibm_auth_endpoint=iam_endpoint)

    # The COS SDK call download_file() downloads to a local file.
    # If you have an object which implements file-like behavior
    # (e.g. it supports write() and can store bytes),
    # you can pass that into a call to
    # download_fileobj() instead of download_file().
    # You would then need to change the implementation of put_file()
    # to call upload_fileobj() and pass in your object instead of
    # the file name.
    def get_file(self, bucket_name=None, file=None):
        cos = self.session.resource(
            service_name='s3',
            endpoint_url=self.cos_endpoint,
            config=Config(signature_version='oauth'))
        return cos.Bucket(bucket_name).download_file(
            Key=PurePath(file).name,
            Filename=file)

    def put_file(self, bucket_name=None, file=None):
        cos = self.session.resource(
            service_name='s3',
            endpoint_url=self.cos_endpoint,
            config=Config(signature_version='oauth'))
        cos.Bucket(bucket_name).upload_file(file, PurePath(file).name)

    def delete_file(self, bucket_name=None, file=None):
        cos = self.session.resource(
            service_name='s3',
            endpoint_url=self.cos_endpoint,
            config=Config(signature_version='oauth'))
        response = cos.Bucket(bucket_name).delete_objects(
            Delete={
                'Objects': [
                    {'Key': file}
                ],
                'Quiet': True
            })
        if 'Errors' in response:
            raise COSError

    # get_files_info returns a dict. The key is the object key,
    # the value is a dict containing core object metadata.
    # See https://ibm.github.io/ibm-cos-sdk-python/reference/services/s3.html#S3.ObjectVersion
    def get_files_info(self, bucket_name=None):
        cos = self.session.resource(
            service_name='s3',
            endpoint_url=self.cos_endpoint,
            config=Config(signature_version='oauth'))
        files = {}
        object_summaries = cos.Bucket(bucket_name).objects.all()
        for summary in object_summaries:
            obj = summary.Object()
            files[obj.key] = {
                'last_modified': obj.last_modified,
                'size': obj.content_length,
                'version': obj.version_id}
        return files
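

# A minimal sketch of the file-like-object variants described in the comments
# above: download_fileobj()/upload_fileobj() accept any object with a
# write()/read() method, so an in-memory io.BytesIO buffer can stand in for a
# file on disk. The helper names get_fileobj()/put_fileobj() and the
# cos_resource argument are illustrative assumptions, not part of
# CloudObjectStorage or the SDK itself.
import io


def get_fileobj(cos_resource, bucket_name, key):
    # Download the object's bytes into an in-memory buffer instead of a
    # local file, then rewind so callers can read from the start.
    buffer = io.BytesIO()
    cos_resource.Bucket(bucket_name).download_fileobj(Key=key, Fileobj=buffer)
    buffer.seek(0)
    return buffer


def put_fileobj(cos_resource, bucket_name, key, fileobj):
    # Upload from any readable file-like object (e.g. io.BytesIO).
    cos_resource.Bucket(bucket_name).upload_fileobj(Fileobj=fileobj, Key=key)


# Example usage (resource, bucket, and key names are illustrative):
#   buf = get_fileobj(cos, 'my-bucket', 'report.csv')
#   data = buf.read()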