def test_should_give_credentials(self, sts_mock):
    self.credentials_provider_mock.get_credentials_for_all_roles.return_value = DUMMY_CREDENTIALS
    given_credentials = Credentials()
    given_credentials.access_key = ANOTHER_KEY
    given_credentials.secret_key = ANOTHER_SECRET
    given_credentials.session_token = ANOTHER_TOKEN
    given_credentials.expiration = ANOTHER_EXPIRATION
    given_credentials_string = json.dumps({
        "AccessKeyId": ANOTHER_KEY,
        "SecretAccessKey": ANOTHER_SECRET,
        "Token": ANOTHER_TOKEN,
        "Expiration": ANOTHER_EXPIRATION
    })
    sts_mock.return_value.assume_role.return_value = AssumedRole(credentials=given_credentials)

    credentials = self.provider.get_credentials_for_all_roles()

    sts_mock.assert_called_with("fo-bar-1",
                                aws_access_key_id="ACCESS_KEY",
                                aws_secret_access_key="SECRET",
                                security_token="MY_TOKEN",
                                proxy=None,
                                proxy_port=None)
    self.assertEqual(given_credentials_string, credentials[ROLE])
def test_to_dict(self):
    # This would fail miserably if ``Credentials.request_id`` hadn't been
    # explicitly set (no default).
    # Default.
    self.assertEqual(self.creds.to_dict(), {
        'access_key': None,
        'expiration': None,
        'request_id': None,
        'secret_key': None,
        'session_token': None
    })

    # Override.
    creds = Credentials()
    creds.access_key = 'something'
    creds.secret_key = 'crypto'
    creds.session_token = 'this'
    creds.expiration = 'way'
    creds.request_id = 'comes'
    self.assertEqual(creds.to_dict(), {
        'access_key': 'something',
        'expiration': 'way',
        'request_id': 'comes',
        'secret_key': 'crypto',
        'session_token': 'this'
    })
def authenticate(self, account, user, passwd, new_passwd=None):
    try:
        req = urllib2.Request(self.auth_url)
        if new_passwd:
            auth_string = "%s@%s;%s@%s" % (base64.b64encode(user),
                                           base64.b64encode(account),
                                           base64.b64encode(passwd),
                                           new_passwd)
        else:
            auth_string = "%s@%s:%s" % (base64.b64encode(user),
                                        base64.b64encode(account),
                                        passwd)
        encoded_auth = base64.b64encode(auth_string)
        req.add_header('Authorization', "Basic %s" % encoded_auth)
        response = urllib2.urlopen(req, timeout=15)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials(None)
        h = boto.handler.XmlHandler(creds, None)
        xml.sax.parseString(body, h)
        logging.info("authenticated user: " + account + "/" + user)
        return creds
    except urllib2.URLError, err:
        # this returned for authorization problem
        # HTTP Error 401: Unauthorized
        # HTTP Error 403: Forbidden (when password has expired)
        if issubclass(err.__class__, urllib2.HTTPError):
            raise eucaconsole.EuiException(err.code, 'Not Authorized')
        # this returned for connection problem (i.e. timeout)
        # <urlopen error [Errno 61] Connection refused>
        if issubclass(err.__class__, urllib2.URLError):
            raise eucaconsole.EuiException(504, 'Timed out')
def test_session_token(self):
    print('--- running Session Token tests ---')
    c = STSConnection()
    # Create a session token
    token = c.get_session_token()
    # Save session token to a file
    token.save('token.json')
    # Now load up a copy of that token
    token_copy = Credentials.load('token.json')
    assert token_copy.access_key == token.access_key
    assert token_copy.secret_key == token.secret_key
    assert token_copy.session_token == token.session_token
    assert token_copy.expiration == token.expiration
    assert token_copy.request_id == token.request_id
    os.unlink('token.json')
    assert not token.is_expired()
    # Try using the session token with S3
    s3 = S3Connection(aws_access_key_id=token.access_key,
                      aws_secret_access_key=token.secret_key,
                      security_token=token.session_token)
    buckets = s3.get_all_buckets()
    print('--- tests completed ---')
def test_session_token(self):
    print '--- running Session Token tests ---'
    c = STSConnection()
    # Create a session token
    token = c.get_session_token()
    # Save session token to a file
    token.save('token.json')
    # Now load up a copy of that token
    token_copy = Credentials.load('token.json')
    assert token_copy.access_key == token.access_key
    assert token_copy.secret_key == token.secret_key
    assert token_copy.session_token == token.session_token
    assert token_copy.expiration == token.expiration
    assert token_copy.request_id == token.request_id
    os.unlink('token.json')
    assert not token.is_expired()
    # Try using the session token with S3
    s3 = S3Connection(aws_access_key_id=token.access_key,
                      aws_secret_access_key=token.secret_key,
                      security_token=token.session_token)
    buckets = s3.get_all_buckets()
    print '--- tests completed ---'
class STSCredentialsTest(unittest.TestCase):
    sts = True

    def setUp(self):
        super(STSCredentialsTest, self).setUp()
        self.creds = Credentials()

    def test_to_dict(self):
        # This would fail miserably if ``Credentials.request_id`` hadn't been
        # explicitly set (no default).
        # Default.
        self.assertEqual(self.creds.to_dict(), {
            'access_key': None,
            'expiration': None,
            'request_id': None,
            'secret_key': None,
            'session_token': None
        })

        # Override.
        creds = Credentials()
        creds.access_key = 'something'
        creds.secret_key = 'crypto'
        creds.session_token = 'this'
        creds.expiration = 'way'
        creds.request_id = 'comes'
        self.assertEqual(creds.to_dict(), {
            'access_key': 'something',
            'expiration': 'way',
            'request_id': 'comes',
            'secret_key': 'crypto',
            'session_token': 'this'
        })
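# Illustrative sketch (not taken from the original sources): a minimal
# credentials object whose to_dict() would satisfy the assertions in the test
# above. The five attribute names and their None defaults come from the test
# itself; the class name _SketchCredentials is made up for this example.
class _SketchCredentials(object):
    def __init__(self):
        self.access_key = None
        self.secret_key = None
        self.session_token = None
        self.expiration = None
        self.request_id = None

    def to_dict(self):
        # Serialize the five documented fields; unset values stay None.
        return {
            'access_key': self.access_key,
            'secret_key': self.secret_key,
            'session_token': self.session_token,
            'expiration': self.expiration,
            'request_id': self.request_id,
        }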
def test_should_give_credentials(self, sts_mock):
    self.credentials_provider_mock.get_credentials_for_all_roles.return_value = DUMMY_CREDENTIALS
    given_credentials = Credentials()
    given_credentials.access_key = ANOTHER_KEY
    given_credentials.secret_key = ANOTHER_SECRET
    given_credentials.session_token = ANOTHER_TOKEN
    given_credentials.expiration = ANOTHER_EXPIRATION
    given_credentials_string = json.dumps({
        "AccessKeyId": ANOTHER_KEY,
        "SecretAccessKey": ANOTHER_SECRET,
        "Token": ANOTHER_TOKEN,
        "Expiration": ANOTHER_EXPIRATION
    })
    sts_mock.return_value.assume_role.return_value = AssumedRole(credentials=given_credentials)

    credentials = self.provider.get_credentials_for_all_roles()

    self.assertEqual(given_credentials_string, credentials[ROLE])
def authenticate(self, timeout=20):
    """Make authentication request to AWS STS service.
    Timeout defaults to 20 seconds."""
    req = urllib2.Request(self.endpoint, data=self.package)
    response = urllib2.urlopen(req, timeout=timeout)
    body = response.read()
    # parse AccessKeyId, SecretAccessKey and SessionToken
    creds = Credentials()
    h = BotoXmlHandler(creds, None)
    xml.sax.parseString(body, h)
    logging.info("Authenticated AWS user")
    return creds
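# Illustrative usage sketch (assumptions: an object exposing the authenticate()
# method above, here called `authenticator`, and boto's S3Connection available).
# The temporary credentials parsed from the STS response are handed to a boto
# connection the same way the session-token tests above do it.
from boto.s3.connection import S3Connection

def connect_with_temporary_credentials(authenticator):
    creds = authenticator.authenticate(timeout=20)
    # access_key / secret_key / session_token are the attributes populated by
    # the XML handler in authenticate()
    return S3Connection(aws_access_key_id=creds.access_key,
                        aws_secret_access_key=creds.secret_key,
                        security_token=creds.session_token)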
def authenticate_aws(self, package):
    try:
        req = urllib2.Request('https://sts.amazonaws.com', package)
        response = urllib2.urlopen(req, timeout=20)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials(None)
        h = boto.handler.XmlHandler(creds, None)
        xml.sax.parseString(body, h)
        logging.info("authenticated aws user")
        return creds
    except urllib2.URLError, err:
        # this returned for authorization problem
        # HTTP Error 401: Unauthorized
        # HTTP Error 403: Forbidden (when password has expired)
        if issubclass(err.__class__, urllib2.HTTPError):
            raise eucaconsole.EuiException(err.code, 'Not Authorized')
        # this returned for connection problem (i.e. timeout)
        # <urlopen error [Errno 61] Connection refused>
        if issubclass(err.__class__, urllib2.URLError):
            raise eucaconsole.EuiException(504, 'Timed out')
def authenticate(self, account, user, passwd, new_passwd=None,
                 timeout=15, duration=3600):
    if user == 'admin' and duration > 3600:
        # admin cannot have more than 1 hour duration
        duration = 3600
    # because of the variability, we need to keep this here, not in __init__
    self.auth_url = self.TEMPLATE.format(
        host=self.host,
        port=self.port,
        dur=duration,
    )
    req = urllib2.Request(self.auth_url)
    if new_passwd:
        auth_string = "{user}@{account};{pw}@{new_pw}".format(
            user=base64.b64encode(user),
            account=base64.b64encode(account),
            pw=base64.b64encode(passwd),
            new_pw=new_passwd)
    else:
        auth_string = "{user}@{account}:{pw}".format(
            user=base64.b64encode(user),
            account=base64.b64encode(account),
            pw=passwd)
    encoded_auth = base64.b64encode(auth_string)
    req.add_header('Authorization', "Basic %s" % encoded_auth)
    response = urllib2.urlopen(req, timeout=timeout)
    body = response.read()
    # parse AccessKeyId, SecretAccessKey and SessionToken
    creds = Credentials()
    h = BotoXmlHandler(creds, None)
    xml.sax.parseString(body, h)
    logging.info("Authenticated Eucalyptus user: " + account + "/" + user)
    return creds
def authenticate(self, timeout=20):
    """Make authentication request to AWS STS service.
    Timeout defaults to 20 seconds."""
    if self.validate_certs:
        conn = CertValidatingHTTPSConnection(self.host, self.port,
                                             timeout=timeout, **self.kwargs)
    else:
        conn = httplib.HTTPSConnection(self.host, self.port, timeout=timeout)
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    try:
        conn.request('POST', '', self.package, headers)
        response = conn.getresponse()
        if response.status != 200:
            raise urllib2.HTTPError(url='', code=response.status,
                                    msg=response.reason, hdrs=None, fp=None)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials()
        h = BotoXmlHandler(creds, None)
        parseString(body, h)
        return creds
    except SSLError as err:
        if err.message != '':
            raise urllib2.URLError(err.message)
        else:
            raise urllib2.URLError(err[1])
    except socket.error as err:
        raise urllib2.URLError(err.message)
def authenticate(self, account, user, passwd):
    try:
        req = urllib2.Request(self.auth_url)
        auth_string = "%s@%s:%s" % (base64.b64encode(user),
                                    base64.b64encode(account),
                                    passwd)
        encoded_auth = base64.b64encode(auth_string.encode('utf8'))
        req.add_header('Authorization', "Basic %s" % encoded_auth)
        response = urllib2.urlopen(req, timeout=15)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials(None)
        h = boto.handler.XmlHandler(creds, None)
        xml.sax.parseString(body, h)
        logging.info("authenticated user: " + account + "/" + user)
        return creds
    except urllib2.URLError, err:
        traceback.print_exc(file=sys.stdout)
        if not issubclass(err.__class__, urllib2.HTTPError):
            if isinstance(err.reason, socket.timeout):
                raise server.EuiException(504, 'Timed out')
        raise server.EuiException(401, 'Not Authorized')
def setUp(self):
    super(STSCredentialsTest, self).setUp()
    self.creds = Credentials()
def test_layer1_basic(self):
    print '--- running DynamoDB Layer1 tests ---'
    # Create a Layer1 connection with an expired set of
    # credentials to test the automatic renewal of tokens
    bad_creds = Credentials.from_json(json_doc)
    c = Layer1(session_token=bad_creds)

    # First create a table
    table_name = 'test-%d' % int(time.time())
    hash_key_name = 'forum_name'
    hash_key_type = 'S'
    range_key_name = 'subject'
    range_key_type = 'S'
    read_units = 5
    write_units = 5
    schema = {'HashKeyElement': {'AttributeName': hash_key_name,
                                 'AttributeType': hash_key_type},
              'RangeKeyElement': {'AttributeName': range_key_name,
                                  'AttributeType': range_key_type}}
    provisioned_throughput = {'ReadCapacityUnits': read_units,
                              'WriteCapacityUnits': write_units}
    result = c.create_table(table_name, schema, provisioned_throughput)
    assert result['TableDescription']['TableName'] == table_name
    result_schema = result['TableDescription']['KeySchema']
    assert result_schema['HashKeyElement']['AttributeName'] == hash_key_name
    assert result_schema['HashKeyElement']['AttributeType'] == hash_key_type
    assert result_schema['RangeKeyElement']['AttributeName'] == range_key_name
    assert result_schema['RangeKeyElement']['AttributeType'] == range_key_type
    result_thruput = result['TableDescription']['ProvisionedThroughput']
    assert result_thruput['ReadCapacityUnits'] == read_units
    assert result_thruput['WriteCapacityUnits'] == write_units

    # Wait for table to become active
    result = c.describe_table(table_name)
    while result['Table']['TableStatus'] != 'ACTIVE':
        time.sleep(5)
        result = c.describe_table(table_name)

    # List tables and make sure new one is there
    result = c.list_tables()
    assert table_name in result['TableNames']

    # Update the tables ProvisionedThroughput
    new_read_units = 10
    new_write_units = 5
    new_provisioned_throughput = {'ReadCapacityUnits': new_read_units,
                                  'WriteCapacityUnits': new_write_units}
    result = c.update_table(table_name, new_provisioned_throughput)

    # Wait for table to be updated
    result = c.describe_table(table_name)
    while result['Table']['TableStatus'] == 'UPDATING':
        time.sleep(5)
        result = c.describe_table(table_name)
    result_thruput = result['Table']['ProvisionedThroughput']
    assert result_thruput['ReadCapacityUnits'] == new_read_units
    assert result_thruput['WriteCapacityUnits'] == new_write_units

    # Put an item
    item1_key = 'Amazon DynamoDB'
    item1_range = 'DynamoDB Thread 1'
    item1_data = {
        hash_key_name: {hash_key_type: item1_key},
        range_key_name: {range_key_type: item1_range},
        'Message': {'S': 'DynamoDB thread 1 message text'},
        'LastPostedBy': {'S': 'User A'},
        'Views': {'N': '0'},
        'Replies': {'N': '0'},
        'Answered': {'N': '0'},
        'Tags': {'SS': ["index", "primarykey", "table"]},
        'LastPostDateTime': {'S': '12/9/2011 11:36:03 PM'}
    }
    result = c.put_item(table_name, item1_data)

    # Now do a consistent read and check results
    key1 = {'HashKeyElement': {hash_key_type: item1_key},
            'RangeKeyElement': {range_key_type: item1_range}}
    result = c.get_item(table_name, key=key1, consistent_read=True)
    for name in item1_data:
        assert name in result['Item']

    # Try to get an item that does not exist.
    invalid_key = {'HashKeyElement': {hash_key_type: 'bogus_key'},
                   'RangeKeyElement': {range_key_type: item1_range}}
    self.assertRaises(DynamoDBKeyNotFoundError,
                      c.get_item, table_name, key=invalid_key)

    # Try retrieving only select attributes
    attributes = ['Message', 'Views']
    result = c.get_item(table_name, key=key1, consistent_read=True,
                        attributes_to_get=attributes)
    for name in result['Item']:
        assert name in attributes

    # Try to delete the item with the wrong Expected value
    expected = {'Views': {'Value': {'N': '1'}}}
    try:
        # pass the table_name variable (not the literal string) so the
        # conditional delete runs against the table created above
        result = c.delete_item(table_name, key=key1, expected=expected)
    except c.ResponseError, e:
        pass
    invalid_key = {'HashKeyElement': {hash_key_type: 'bogus_key'},
                   'RangeKeyElement': {range_key_type: item1_range}}
    self.assertRaises(DynamoDBKeyNotFoundError,
                      c.get_item, table_name, key=invalid_key)

    # Try retrieving only select attributes
    attributes = ['Message', 'Views']
    result = c.get_item(table_name, key=key1, consistent_read=True,
                        attributes_to_get=attributes)
    for name in result['Item']:
        assert name in attributes

    # Try to delete the item with the wrong Expected value
    expected = {'Views': {'Value': {'N': '1'}}}
    self.assertRaises(DynamoDBConditionalCheckFailedError,
                      c.delete_item, table_name, key=key1, expected=expected)

    # Now update the existing object
    attribute_updates = {'Views': {'Value': {'N': '5'}, 'Action': 'PUT'},
                         'Tags': {'Value': {'SS': ['foobar']}, 'Action': 'ADD'}}
    result = c.update_item(table_name, key=key1,
                           attribute_updates=attribute_updates)

    # Try and update an item, in a fashion which makes it too large.
    # The new message text is the item size limit minus 32 bytes and
    # the current object is larger than 32 bytes.
    item_size_overflow_text = 'Text to be padded'.zfill(64 * 1024 - 32)
    attribute_updates = {'Message': {'Value': {'S': item_size_overflow_text},
                                     'Action': 'PUT'}}
    self.assertRaises(DynamoDBValidationError,
                      c.update_item, table_name, key=key1,
                      attribute_updates=attribute_updates)

    # Put a few more items into the table
    item2_key = 'Amazon DynamoDB'
    item2_range = 'DynamoDB Thread 2'
    item2_data = {
        hash_key_name: {hash_key_type: item2_key},
        range_key_name: {range_key_type: item2_range},
        'Message': {'S': 'DynamoDB thread 2 message text'},
        'LastPostedBy': {'S': 'User A'},
        'Views': {'N': '0'},
        'Replies': {'N': '0'},
        'Answered': {'N': '0'},
        'Tags': {'SS': ["index", "primarykey", "table"]},
        'LastPostDateTime': {'S': '12/9/2011 11:36:03 PM'}
    }
    result = c.put_item(table_name, item2_data)
    key2 = {'HashKeyElement': {hash_key_type: item2_key},
            'RangeKeyElement': {range_key_type: item2_range}}

    item3_key = 'Amazon S3'
    item3_range = 'S3 Thread 1'
    item3_data = {
        hash_key_name: {hash_key_type: item3_key},
        range_key_name: {range_key_type: item3_range},
        'Message': {'S': 'S3 Thread 1 message text'},
        'LastPostedBy': {'S': 'User A'},
        'Views': {'N': '0'},
        'Replies': {'N': '0'},
        'Answered': {'N': '0'},
        'Tags': {'SS': ['largeobject', 'multipart upload']},
        'LastPostDateTime': {'S': '12/9/2011 11:36:03 PM'}
    }
    result = c.put_item(table_name, item3_data)
    key3 = {'HashKeyElement': {hash_key_type: item3_key},
            'RangeKeyElement': {range_key_type: item3_range}}

    # Try a few queries
    result = c.query(table_name, {'S': 'Amazon DynamoDB'},
                     {'AttributeValueList': [{'S': 'DynamoDB'}],
                      'ComparisonOperator': 'BEGINS_WITH'})
    assert 'Count' in result
    assert result['Count'] == 2

    # Try a few scans
    result = c.scan(table_name,
                    {'Tags': {'AttributeValueList': [{'S': 'table'}],
                              'ComparisonOperator': 'CONTAINS'}})
    assert 'Count' in result
    assert result['Count'] == 2

    # Now delete the items
    result = c.delete_item(table_name, key=key1)
    result = c.delete_item(table_name, key=key2)
    result = c.delete_item(table_name, key=key3)

    # Now delete the table
    result = c.delete_table(table_name)
    assert result['TableDescription']['TableStatus'] == 'DELETING'
    print '--- tests completed ---'
def _authenticate_(self, account, user, passwd, new_passwd=None,
                   timeout=15, duration=3600):
    auth_path = self.TEMPLATE.format(dur=duration)
    if not self.dns_enabled:
        auth_path = self.NON_DNS_QUERY_PATH + auth_path
    else:
        auth_path = '/' + auth_path
    host = self.host
    if self.dns_enabled:
        host = 'tokens.{0}'.format(host)
    if self.validate_certs:
        conn = CertValidatingHTTPSConnection(host, self.port,
                                             timeout=timeout, **self.kwargs)
    else:
        conn = HttpsConnectionFactory(self.port).https_connection_factory(
            host, timeout=timeout)
    if new_passwd:
        auth_string = u"{user}@{account};{pw}@{new_pw}".format(
            user=base64.b64encode(user),
            account=base64.b64encode(account),
            pw=base64.b64encode(passwd),
            new_pw=new_passwd)
    else:
        auth_string = u"{user}@{account}:{pw}".format(
            user=base64.b64encode(user),
            account=base64.b64encode(account),
            pw=passwd)
    encoded_auth = base64.b64encode(auth_string)
    headers = {'Authorization': "Basic %s" % encoded_auth}
    try:
        conn.request('GET', auth_path, '', headers)
        response = conn.getresponse()
        if response.status != 200:
            raise urllib2.HTTPError(url='', code=response.status,
                                    msg=response.reason, hdrs=None, fp=None)
        body = response.read()
        # parse AccessKeyId, SecretAccessKey and SessionToken
        creds = Credentials()
        h = BotoXmlHandler(creds, None)
        parseString(body, h)
        return creds
    except SSLError as err:
        if err.message != '':
            raise urllib2.URLError(str(err))
        else:
            raise urllib2.URLError(err[1])
    except socket.error as err:
        # when dns enabled, but path cloud, we get here with
        # err=gaierror(8, 'nodename nor servname provided, or not known')
        # when dns disabled, but path cloud, we get here with
        # err=gaierror(8, 'nodename nor servname provided, or not known')
        raise urllib2.URLError(str(err))