def _mock_get_zookeeper_hosts(self, hosts=None):
    """
    Try to encapsulate a variety of behaviors here.

    If hosts is None, a default host is used.
    If hosts is False, no s3 state is returned.
    If hosts are passed as a list of instances, they are returned.
    """
    if hosts is None:
        hosts = [self.get_instance(
            "i-es-zoo", private_dns_name="es.example.internal")]

    self.s3.get_object(self.env_name, "provider-state")
    if hosts is False:
        error = S3Error("<error/>", 404)
        error.errors = [{"Code": "NoSuchKey"}]
        self.mocker.result(fail(error))
        return

    state = dump({
        "zookeeper-instances": [i.instance_id for i in hosts]})
    self.mocker.result(succeed(state))

    if hosts:
        # connect grabs the first host of a set.
        self.ec2.describe_instances(hosts[0].instance_id)
        self.mocker.result(succeed([hosts[0]]))

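# Hypothetical sketch (not taken from the original suite) of the third mode of the
# helper above: explicitly passed instances are echoed back through the mocked S3
# state and the first one is described, matching "connect grabs the first host of
# a set". The instance ids and dns names here are illustrative only.
def test_zookeeper_hosts_explicit_mock(self):
    instances = [
        self.get_instance("i-zk-one", private_dns_name="zk1.example.internal"),
        self.get_instance("i-zk-two", private_dns_name="zk2.example.internal")]
    self._mock_get_zookeeper_hosts(hosts=instances)
    self.mocker.replay()
    # The provider call that consumes these expectations (the connect path) is
    # outside this excerpt, so it is not reproduced here.
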
def test_save_non_existant_bucket(self):
    """
    When saving instance information to S3, the EC2 provider will create a
    namespaced bucket specific to the provider instance if it does not
    already exist.
    """
    instances = [self.get_instance("i-foobar", dns_name="x1.example.com")]
    state = dump({
        "zookeeper-instances":
        [[i.instance_id, i.dns_name] for i in instances]})

    self.s3.put_object(self.env_name, "provider-state", state)
    error = S3Error("<error/>", 404)
    error.errors = [{"Code": "NoSuchBucket"}]
    self.mocker.result(fail(error))

    self.s3.create_bucket(self.env_name)
    self.mocker.result(succeed({}))

    self.s3.put_object(self.env_name, "provider-state", state)
    self.mocker.result(succeed(state))
    self.mocker.replay()

    provider = self.get_provider()
    d = provider.save_state({
        "zookeeper-instances":
        [[i.instance_id, i.dns_name] for i in instances]})

    def assert_state(saved_state):
        self.assertEqual(saved_state, state)

    d.addCallback(assert_state)
    return d

def test_no_state(self):
    """
    When loading saved state from S3, the provider method gracefully
    handles the scenario where there is no saved state.
    """
    error = S3Error("<error/>", 404)
    error.errors = [{"Code": "NoSuchKey"}]
    return self.verify_no_environment(fail(error))

def test_get_file_error(self):
    """
    An unexpected error from s3 on file retrieval is exposed via the api.
    """
    control_bucket = self.get_config()["control-bucket"]
    file_name = "pirates/ship.txt"
    self.s3.get_object(control_bucket, file_name)
    self.mocker.result(fail(S3Error("<error/>", 503)))
    self.mocker.replay()

    storage = self.get_storage()
    d = storage.get(file_name)
    self.failUnlessFailure(d, S3Error)
    return d

def get_bucket(self, bucket, marker=None, max_keys=None, prefix=None):
    try:
        pieces = self._state.buckets[bucket]
    except KeyError:
        return fail(S3Error("<nosuchbucket/>", 400))
    listing = pieces["listing"]

    if max_keys is None:
        max_keys = 1000
    # Normalise prefix and marker to bytes so they compare consistently
    # against both bytes and text keys.
    if prefix is None:
        prefix = b""
    if isinstance(prefix, str):
        prefix = prefix.encode()
    if marker is None:
        keys_after = b""
    else:
        keys_after = marker
    if isinstance(keys_after, str):
        keys_after = keys_after.encode()

    def key_bytes(content):
        key = content.key
        return key if isinstance(key, bytes) else key.encode()

    prefixed_contents = (
        content
        for content in sorted(listing.contents, key=lambda item: item.key)
        if key_bytes(content).startswith(prefix)
        and key_bytes(content) > keys_after
    )

    contents = list(islice(prefixed_contents, max_keys))
    # If anything remains in the generator after taking max_keys items,
    # the listing was truncated.
    is_truncated = "false"
    for ignored in prefixed_contents:
        is_truncated = "true"
        break

    listing = attr.assoc(
        listing,
        contents=contents,
        prefix=prefix,
        is_truncated=is_truncated,
        marker=marker,
    )
    return succeed(listing)

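# Hedged usage sketch of the fake listing behaviour above (the client fixture,
# bucket name, and key prefix are assumptions, not taken from the real suite):
# a small max_keys should produce a truncated listing, and the remaining keys can
# then be fetched by passing the last returned key back as ``marker``.
def list_first_page(fake_s3_client):
    d = fake_s3_client.get_bucket("some-bucket", prefix="logs/", max_keys=2)

    def summarize(listing):
        keys = [content.key for content in listing.contents]
        # is_truncated is the string "true"/"false", mirroring the S3 XML form.
        return keys, listing.is_truncated

    return d.addCallback(summarize)
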
def test_load_nonexistant_bucket(self):
    """
    When loading saved state from s3, the system returns False if the
    s3 control bucket does not exist.
    """
    self.s3.get_object(self.env_name, "provider-state")
    error = S3Error("<error/>", 404)
    error.errors = [{"Code": "NoSuchBucket"}]
    self.mocker.result(fail(error))
    self.mocker.replay()

    provider = self.get_provider()
    d = provider.load_state()

    def assert_load_value(value):
        self.assertIdentical(value, False)

    d.addCallback(assert_load_value)
    return d

def delete_bucket(self, bucket):
    if self._state.buckets[bucket]["listing"].contents:
        return fail(S3Error("<notempty/>", 400))
    del self._state.buckets[bucket]
    return succeed(None)

def g(self, *a, **kw):
    if self._state.get_rate_limit_exceeded():
        return fail(S3Error("<slowdown/>", 400))
    return f(self, *a, **kw)

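# A minimal sketch of the assumed enclosing decorator (the outer function is not
# shown in this excerpt, and its name here is an assumption): it wraps a
# fake-client method ``f`` so the shared rate-limit flag is consulted before
# delegating, which is what makes ``g`` above a generic wrapper rather than a
# standalone method.
from functools import wraps

from twisted.internet.defer import fail

from txaws.s3.exception import S3Error  # import path assumed


def rate_limited(f):
    @wraps(f)
    def g(self, *a, **kw):
        if self._state.get_rate_limit_exceeded():
            return fail(S3Error("<slowdown/>", 400))
        return f(self, *a, **kw)
    return g
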
def test_internal_error_result(self):
    error = S3Error(payload.sample_server_internal_error_result)
    self.assertEquals(
        error.get_error_messages(),
        "We encountered an internal error. Please try again.")

def test_invalid_access_key_result(self):
    error = S3Error(payload.sample_s3_invalid_access_key_result)
    self.assertEquals(
        error.get_error_messages(),
        ("The AWS Access Key Id you provided does not exist in our "
         "records."))

def test_signature_mismatch_result(self):
    error = S3Error(payload.sample_s3_signature_mismatch)
    self.assertEquals(
        error.get_error_messages(),
        ("The request signature we calculated does not match the "
         "signature you provided. Check your key and signing method."))

def test_error_repr(self):
    error = S3Error(payload.sample_s3_invalid_access_key_result)
    self.assertEquals(
        repr(error),
        "<S3Error object with Error code: InvalidAccessKeyId>")

def test_error_count(self):
    error = S3Error(payload.sample_s3_invalid_access_key_result)
    self.assertEquals(len(error.errors), 1)

def test_get_error_code(self):
    error = S3Error(payload.sample_s3_invalid_access_key_result)
    self.assertEquals(error.get_error_code(), "InvalidAccessKeyId")

def test_set_400_error(self):
    xml = "<Error><Code>1</Code><Message>2</Message></Error>"
    error = S3Error("<dummy />")
    error._set_400_error(XML(xml))
    self.assertEquals(error.errors[0]["Code"], "1")
    self.assertEquals(error.errors[0]["Message"], "2")