def put_metrics(self, build_time_metrics):
    """Persist build-time metrics to DynamoDB for this instance/config pair.

    Looks up the row keyed by (instance_type, config); if none exists a fresh
    Item is created. The ``build_time`` attribute holds a JSON-encoded list,
    so new metrics are appended to any list already stored. A no-op call
    (empty/None metrics) returns immediately without touching the table.
    """
    if not build_time_metrics:
        return None

    try:
        item = self.table.get_item(
            instance_type=_get_instance_type(self.localrun),
            config=self.benchmark_config)
        self.logger.debug("Found existing entity in dynamodb")
    except ItemNotFound:
        # First metrics for this instance/config: start a brand-new row.
        self.logger.debug("No existing entity found in dynamodb, creating new one")
        item = Item(self.table, data={
            'instance_type': _get_instance_type(self.localrun),
            'config': self.benchmark_config,
        })

    stored_json = item['build_time']
    if stored_json:
        # Merge: decode the stored JSON list and append the new metrics.
        self.logger.debug("Extending existing metric list for build_time")
        merged = json.loads(stored_json)
        merged.extend(build_time_metrics)
        item['build_time'] = json.dumps(merged)
    else:
        item['build_time'] = json.dumps(build_time_metrics)

    # partial_save only writes changed attributes; skip the round-trip when
    # nothing actually changed.
    if item.needs_save():
        item.partial_save()
        self.logger.debug("Saved item to dynamodb")
def activate(table, project, playbook, progress_str):
    """Transition *project* to the 'active' state in *table*.

    Creates an 'idle' state row if the project has none yet. Raises
    ProjectBlockedException / ProjectActiveException when the project is
    already blocked or active; otherwise marks it active, saves, and
    returns the state Item.

    NOTE(review): ``playbook`` and ``progress_str`` are accepted but unused
    here -- presumably part of a shared callback signature; confirm.
    """
    # Progress tick for the caller's console output.
    print('.', end='')

    try:
        record = table.get_item(project=project, consistent=True)
    except ItemNotFound:
        record = Item(table, data={
            'project': project,
            'state': 'idle',
        })

    current = record['state']
    if current == 'blocked':
        raise ProjectBlockedException()
    elif current == 'active':
        raise ProjectActiveException()

    record['state'] = 'active'
    record.partial_save()
    return record
def test_integration(self):
    """End-to-end exercise of the high-level DynamoDB ``Table`` API.

    Covers table creation (schema, throughput, LSI), item put/get/save,
    batch writes/gets, conditional vs. overwrite saves, partial saves,
    queries, scans, counts, and set attributes.

    NOTE(review): hits a live DynamoDB endpoint and sleeps for table
    creation -- this is a slow integration test, not a unit test. The
    ``.next()`` iterator calls below are Python 2 only.
    """
    # Test creating a full table with all options specified.
    users = Table.create('users', schema=[
        HashKey('username'),
        RangeKey('friend_count', data_type=NUMBER)
    ], throughput={
        'read': 5,
        'write': 5,
    }, indexes=[
        KeysOnlyIndex('LastNameIndex', parts=[
            HashKey('username'),
            RangeKey('last_name')
        ]),
    ])
    self.addCleanup(users.delete)
    self.assertEqual(len(users.schema), 2)
    self.assertEqual(users.throughput['read'], 5)

    # Wait for it.  (Table creation is asynchronous server-side.)
    time.sleep(60)

    # Make sure things line up if we're introspecting the table.
    users_hit_api = Table('users')
    users_hit_api.describe()
    self.assertEqual(len(users.schema), len(users_hit_api.schema))
    self.assertEqual(users.throughput, users_hit_api.throughput)
    self.assertEqual(len(users.indexes), len(users_hit_api.indexes))

    # Test putting some items individually.
    users.put_item(data={
        'username': '******',
        'first_name': 'John',
        'last_name': 'Doe',
        'friend_count': 4
    })
    users.put_item(data={
        'username': '******',
        'first_name': 'Alice',
        'last_name': 'Expert',
        'friend_count': 2
    })
    time.sleep(5)

    # Test batch writing.
    with users.batch_write() as batch:
        batch.put_item({
            'username': '******',
            'first_name': 'Jane',
            'last_name': 'Doe',
            'friend_count': 3
        })
        batch.delete_item(username='******', friend_count=2)
        batch.put_item({
            'username': '******',
            'first_name': 'Bob',
            'last_name': 'Smith',
            'friend_count': 1
        })
    time.sleep(5)

    # Test getting an item & updating it.
    # This is the "safe" variant (only write if there have been no
    # changes).
    jane = users.get_item(username='******', friend_count=3)
    self.assertEqual(jane['first_name'], 'Jane')
    jane['last_name'] = 'Doh'
    self.assertTrue(jane.save())

    # Test strongly consistent getting of an item.
    # Additionally, test the overwrite behavior.
    client_1_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(jane['first_name'], 'Jane')
    client_2_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(jane['first_name'], 'Jane')

    # Write & assert the ``first_name`` is gone, then...
    del client_1_jane['first_name']
    self.assertTrue(client_1_jane.save())
    check_name = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(check_name['first_name'], None)

    # ...overwrite the data with what's in memory.
    client_2_jane['first_name'] = 'Joan'
    # Now a write that fails due to default expectations...
    self.assertRaises(exceptions.JSONResponseError, client_2_jane.save)
    # ... so we force an overwrite.
    self.assertTrue(client_2_jane.save(overwrite=True))
    check_name_again = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(check_name_again['first_name'], 'Joan')

    # Reset it.
    jane['username'] = '******'
    jane['first_name'] = 'Jane'
    jane['last_name'] = 'Doe'
    jane['friend_count'] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Test the partial update behavior.
    client_3_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    client_4_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    client_3_jane['favorite_band'] = 'Feed Me'
    # No ``overwrite`` needed due to new data.
    self.assertTrue(client_3_jane.save())
    # Expectations are only checked on the ``first_name``, so what wouldn't
    # have succeeded by default does succeed here.
    client_4_jane['first_name'] = 'Jacqueline'
    self.assertTrue(client_4_jane.partial_save())
    partial_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(partial_jane['favorite_band'], 'Feed Me')
    self.assertEqual(partial_jane['first_name'], 'Jacqueline')

    # Reset it.
    jane['username'] = '******'
    jane['first_name'] = 'Jane'
    jane['last_name'] = 'Doe'
    jane['friend_count'] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Ensure that partial saves of a brand-new object work.
    sadie = Item(users, data={
        'username': '******',
        'first_name': 'Sadie',
        'favorite_band': 'Zedd',
        'friend_count': 7
    })
    self.assertTrue(sadie.partial_save())
    serverside_sadie = users.get_item(
        username='******',
        friend_count=7,
        consistent=True
    )
    self.assertEqual(serverside_sadie['first_name'], 'Sadie')

    # Test the eventually consistent query.
    results = users.query(
        username__eq='johndoe',
        last_name__eq='Doe',
        index='LastNameIndex',
        attributes=('username',),
        reverse=True
    )

    for res in results:
        self.assertTrue(res['username'] in ['johndoe',])
        self.assertEqual(res.keys(), ['username'])

    # Test the strongly consistent query.
    c_results = users.query(
        username__eq='johndoe',
        last_name__eq='Doe',
        index='LastNameIndex',
        reverse=True,
        consistent=True
    )

    for res in c_results:
        self.assertTrue(res['username'] in ['johndoe',])

    # Test scans without filters.
    all_users = users.scan(limit=7)
    self.assertEqual(all_users.next()['username'], 'bob')
    self.assertEqual(all_users.next()['username'], 'jane')
    self.assertEqual(all_users.next()['username'], 'johndoe')

    # Test scans with a filter.
    filtered_users = users.scan(limit=2, username__beginswith='j')
    self.assertEqual(filtered_users.next()['username'], 'jane')
    self.assertEqual(filtered_users.next()['username'], 'johndoe')

    # Test deleting a single item.
    johndoe = users.get_item(username='******', friend_count=4)
    johndoe.delete()

    # Test the eventually consistent batch get.
    results = users.batch_get(keys=[
        {'username': '******', 'friend_count': 1},
        {'username': '******', 'friend_count': 3}
    ])
    batch_users = []

    for res in results:
        batch_users.append(res)
        self.assertTrue(res['first_name'] in ['Bob', 'Jane'])

    self.assertEqual(len(batch_users), 2)

    # Test the strongly consistent batch get.
    c_results = users.batch_get(keys=[
        {'username': '******', 'friend_count': 1},
        {'username': '******', 'friend_count': 3}
    ], consistent=True)
    c_batch_users = []

    for res in c_results:
        c_batch_users.append(res)
        self.assertTrue(res['first_name'] in ['Bob', 'Jane'])

    self.assertEqual(len(c_batch_users), 2)

    # Test count, but in a weak fashion. Because lag time.
    self.assertTrue(users.count() > -1)

    # Test query count
    count = users.query_count(
        username__eq='bob',
    )
    self.assertEqual(count, 1)

    # Test without LSIs (describe calls shouldn't fail).
    admins = Table.create('admins', schema=[
        HashKey('username')
    ])
    self.addCleanup(admins.delete)
    time.sleep(60)
    admins.describe()
    self.assertEqual(admins.throughput['read'], 5)
    self.assertEqual(admins.indexes, [])

    # A single query term should fail on a table with *ONLY* a HashKey.
    self.assertRaises(
        exceptions.QueryError,
        admins.query,
        username__eq='johndoe'
    )

    # But it shouldn't break on more complex tables.
    res = users.query(username__eq='johndoe')

    # Test putting with/without sets.
    mau5_created = users.put_item(data={
        'username': '******',
        'first_name': 'dead',
        'last_name': 'mau5',
        'friend_count': 2,
        'friends': set(['skrill', 'penny']),
    })
    self.assertTrue(mau5_created)

    penny_created = users.put_item(data={
        'username': '******',
        'first_name': 'Penny',
        'friend_count': 0,
        'friends': set([]),
    })
    self.assertTrue(penny_created)
def test_integration(self):
    """End-to-end exercise of the high-level DynamoDB ``Table`` API.

    Covers table creation (schema, throughput, LSI), item put/get/save,
    batch writes/gets, conditional vs. overwrite saves, partial saves,
    queries, scans, counts, and set attributes.

    FIX: ``indexes`` was passed as a set literal ``{...}``; the API (and
    the sibling copies of this test) take a *list* of index objects --
    a set requires hashable elements and discards ordering.

    NOTE(review): hits a live DynamoDB endpoint and sleeps for table
    creation -- slow integration test. ``.next()`` calls are Python 2 only.
    """
    # Test creating a full table with all options specified.
    users = Table.create('users', schema=[
        HashKey('username'),
        RangeKey('friend_count', data_type=NUMBER)
    ], throughput={
        'read': 5,
        'write': 5,
    }, indexes=[
        # Was ``indexes={ ... }`` (a set); must be a list.
        KeysOnlyIndex('LastNameIndex', parts=[
            HashKey('username'),
            RangeKey('last_name')
        ]),
    ])
    self.addCleanup(users.delete)
    self.assertEqual(len(users.schema), 2)
    self.assertEqual(users.throughput['read'], 5)

    # Wait for it.  (Table creation is asynchronous server-side.)
    time.sleep(60)

    # Make sure things line up if we're introspecting the table.
    users_hit_api = Table('users')
    users_hit_api.describe()
    self.assertEqual(len(users.schema), len(users_hit_api.schema))
    self.assertEqual(users.throughput, users_hit_api.throughput)
    self.assertEqual(len(users.indexes), len(users_hit_api.indexes))

    # Test putting some items individually.
    users.put_item(data={
        'username': '******',
        'first_name': 'John',
        'last_name': 'Doe',
        'friend_count': 4
    })
    users.put_item(data={
        'username': '******',
        'first_name': 'Alice',
        'last_name': 'Expert',
        'friend_count': 2
    })
    time.sleep(5)

    # Test batch writing.
    with users.batch_write() as batch:
        batch.put_item({
            'username': '******',
            'first_name': 'Jane',
            'last_name': 'Doe',
            'friend_count': 3
        })
        batch.delete_item(username='******', friend_count=2)
        batch.put_item({
            'username': '******',
            'first_name': 'Bob',
            'last_name': 'Smith',
            'friend_count': 1
        })
    time.sleep(5)

    # Test getting an item & updating it.
    # This is the "safe" variant (only write if there have been no
    # changes).
    jane = users.get_item(username='******', friend_count=3)
    self.assertEqual(jane['first_name'], 'Jane')
    jane['last_name'] = 'Doh'
    self.assertTrue(jane.save())

    # Test strongly consistent getting of an item.
    # Additionally, test the overwrite behavior.
    client_1_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(jane['first_name'], 'Jane')
    client_2_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(jane['first_name'], 'Jane')

    # Write & assert the ``first_name`` is gone, then...
    del client_1_jane['first_name']
    self.assertTrue(client_1_jane.save())
    check_name = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(check_name['first_name'], None)

    # ...overwrite the data with what's in memory.
    client_2_jane['first_name'] = 'Joan'
    # Now a write that fails due to default expectations...
    self.assertRaises(exceptions.JSONResponseError, client_2_jane.save)
    # ... so we force an overwrite.
    self.assertTrue(client_2_jane.save(overwrite=True))
    check_name_again = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(check_name_again['first_name'], 'Joan')

    # Reset it.
    jane['username'] = '******'
    jane['first_name'] = 'Jane'
    jane['last_name'] = 'Doe'
    jane['friend_count'] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Test the partial update behavior.
    client_3_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    client_4_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    client_3_jane['favorite_band'] = 'Feed Me'
    # No ``overwrite`` needed due to new data.
    self.assertTrue(client_3_jane.save())
    # Expectations are only checked on the ``first_name``, so what wouldn't
    # have succeeded by default does succeed here.
    client_4_jane['first_name'] = 'Jacqueline'
    self.assertTrue(client_4_jane.partial_save())
    partial_jane = users.get_item(
        username='******',
        friend_count=3,
        consistent=True
    )
    self.assertEqual(partial_jane['favorite_band'], 'Feed Me')
    self.assertEqual(partial_jane['first_name'], 'Jacqueline')

    # Reset it.
    jane['username'] = '******'
    jane['first_name'] = 'Jane'
    jane['last_name'] = 'Doe'
    jane['friend_count'] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Ensure that partial saves of a brand-new object work.
    sadie = Item(users, data={
        'username': '******',
        'first_name': 'Sadie',
        'favorite_band': 'Zedd',
        'friend_count': 7
    })
    self.assertTrue(sadie.partial_save())
    serverside_sadie = users.get_item(
        username='******',
        friend_count=7,
        consistent=True
    )
    self.assertEqual(serverside_sadie['first_name'], 'Sadie')

    # Test the eventually consistent query.
    results = users.query(
        username__eq='johndoe',
        last_name__eq='Doe',
        index='LastNameIndex',
        attributes=('username',),
        reverse=True
    )

    for res in results:
        self.assertTrue(res['username'] in ['johndoe',])
        self.assertEqual(res.keys(), ['username'])

    # Test the strongly consistent query.
    c_results = users.query(
        username__eq='johndoe',
        last_name__eq='Doe',
        index='LastNameIndex',
        reverse=True,
        consistent=True
    )

    for res in c_results:
        self.assertTrue(res['username'] in ['johndoe',])

    # Test scans without filters.
    all_users = users.scan(limit=7)
    self.assertEqual(all_users.next()['username'], 'bob')
    self.assertEqual(all_users.next()['username'], 'jane')
    self.assertEqual(all_users.next()['username'], 'johndoe')

    # Test scans with a filter.
    filtered_users = users.scan(limit=2, username__beginswith='j')
    self.assertEqual(filtered_users.next()['username'], 'jane')
    self.assertEqual(filtered_users.next()['username'], 'johndoe')

    # Test deleting a single item.
    johndoe = users.get_item(username='******', friend_count=4)
    johndoe.delete()

    # Test the eventually consistent batch get.
    results = users.batch_get(keys=[
        {'username': '******', 'friend_count': 1},
        {'username': '******', 'friend_count': 3}
    ])
    batch_users = []

    for res in results:
        batch_users.append(res)
        self.assertTrue(res['first_name'] in ['Bob', 'Jane'])

    self.assertEqual(len(batch_users), 2)

    # Test the strongly consistent batch get.
    c_results = users.batch_get(keys=[
        {'username': '******', 'friend_count': 1},
        {'username': '******', 'friend_count': 3}
    ], consistent=True)
    c_batch_users = []

    for res in c_results:
        c_batch_users.append(res)
        self.assertTrue(res['first_name'] in ['Bob', 'Jane'])

    self.assertEqual(len(c_batch_users), 2)

    # Test count, but in a weak fashion. Because lag time.
    self.assertTrue(users.count() > -1)

    # Test query count
    count = users.query_count(
        username__eq='bob',
    )
    self.assertEqual(count, 1)

    # Test without LSIs (describe calls shouldn't fail).
    admins = Table.create('admins', schema=[
        HashKey('username')
    ])
    self.addCleanup(admins.delete)
    time.sleep(60)
    admins.describe()
    self.assertEqual(admins.throughput['read'], 5)
    self.assertEqual(admins.indexes, [])

    # A single query term should fail on a table with *ONLY* a HashKey.
    self.assertRaises(
        exceptions.QueryError,
        admins.query,
        username__eq='johndoe'
    )

    # But it shouldn't break on more complex tables.
    res = users.query(username__eq='johndoe')

    # Test putting with/without sets.
    mau5_created = users.put_item(data={
        'username': '******',
        'first_name': 'dead',
        'last_name': 'mau5',
        'friend_count': 2,
        'friends': set(['skrill', 'penny']),
    })
    self.assertTrue(mau5_created)

    penny_created = users.put_item(data={
        'username': '******',
        'first_name': 'Penny',
        'friend_count': 0,
        'friends': set([]),
    })
    self.assertTrue(penny_created)
def test_integration(self):
    """End-to-end exercise of the high-level DynamoDB ``Table`` API.

    Black-formatted variant of the same integration test: table creation
    (schema, throughput, LSI), item put/get/save, batch writes/gets,
    conditional vs. overwrite saves, partial saves, queries, scans,
    counts, and set attributes.

    NOTE(review): hits a live DynamoDB endpoint and sleeps for table
    creation -- slow integration test. ``.next()`` calls are Python 2 only.
    """
    # Test creating a full table with all options specified.
    users = Table.create(
        "users",
        schema=[HashKey("username"), RangeKey("friend_count", data_type=NUMBER)],
        throughput={"read": 5, "write": 5},
        indexes=[KeysOnlyIndex("LastNameIndex", parts=[HashKey("username"), RangeKey("last_name")])],
    )
    self.addCleanup(users.delete)
    self.assertEqual(len(users.schema), 2)
    self.assertEqual(users.throughput["read"], 5)

    # Wait for it.  (Table creation is asynchronous server-side.)
    time.sleep(60)

    # Make sure things line up if we're introspecting the table.
    users_hit_api = Table("users")
    users_hit_api.describe()
    self.assertEqual(len(users.schema), len(users_hit_api.schema))
    self.assertEqual(users.throughput, users_hit_api.throughput)
    self.assertEqual(len(users.indexes), len(users_hit_api.indexes))

    # Test putting some items individually.
    users.put_item(data={"username": "******", "first_name": "John", "last_name": "Doe", "friend_count": 4})
    users.put_item(data={"username": "******", "first_name": "Alice", "last_name": "Expert", "friend_count": 2})
    time.sleep(5)

    # Test batch writing.
    with users.batch_write() as batch:
        batch.put_item({"username": "******", "first_name": "Jane", "last_name": "Doe", "friend_count": 3})
        batch.delete_item(username="******", friend_count=2)
        batch.put_item({"username": "******", "first_name": "Bob", "last_name": "Smith", "friend_count": 1})
    time.sleep(5)

    # Test getting an item & updating it.
    # This is the "safe" variant (only write if there have been no
    # changes).
    jane = users.get_item(username="******", friend_count=3)
    self.assertEqual(jane["first_name"], "Jane")
    jane["last_name"] = "Doh"
    self.assertTrue(jane.save())

    # Test strongly consistent getting of an item.
    # Additionally, test the overwrite behavior.
    client_1_jane = users.get_item(username="******", friend_count=3, consistent=True)
    self.assertEqual(jane["first_name"], "Jane")
    client_2_jane = users.get_item(username="******", friend_count=3, consistent=True)
    self.assertEqual(jane["first_name"], "Jane")

    # Write & assert the ``first_name`` is gone, then...
    del client_1_jane["first_name"]
    self.assertTrue(client_1_jane.save())
    check_name = users.get_item(username="******", friend_count=3, consistent=True)
    self.assertEqual(check_name["first_name"], None)

    # ...overwrite the data with what's in memory.
    client_2_jane["first_name"] = "Joan"
    # Now a write that fails due to default expectations...
    self.assertRaises(exceptions.JSONResponseError, client_2_jane.save)
    # ... so we force an overwrite.
    self.assertTrue(client_2_jane.save(overwrite=True))
    check_name_again = users.get_item(username="******", friend_count=3, consistent=True)
    self.assertEqual(check_name_again["first_name"], "Joan")

    # Reset it.
    jane["username"] = "******"
    jane["first_name"] = "Jane"
    jane["last_name"] = "Doe"
    jane["friend_count"] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Test the partial update behavior.
    client_3_jane = users.get_item(username="******", friend_count=3, consistent=True)
    client_4_jane = users.get_item(username="******", friend_count=3, consistent=True)
    client_3_jane["favorite_band"] = "Feed Me"
    # No ``overwrite`` needed due to new data.
    self.assertTrue(client_3_jane.save())
    # Expectations are only checked on the ``first_name``, so what wouldn't
    # have succeeded by default does succeed here.
    client_4_jane["first_name"] = "Jacqueline"
    self.assertTrue(client_4_jane.partial_save())
    partial_jane = users.get_item(username="******", friend_count=3, consistent=True)
    self.assertEqual(partial_jane["favorite_band"], "Feed Me")
    self.assertEqual(partial_jane["first_name"], "Jacqueline")

    # Reset it.
    jane["username"] = "******"
    jane["first_name"] = "Jane"
    jane["last_name"] = "Doe"
    jane["friend_count"] = 3
    self.assertTrue(jane.save(overwrite=True))

    # Ensure that partial saves of a brand-new object work.
    sadie = Item(
        users, data={"username": "******", "first_name": "Sadie", "favorite_band": "Zedd", "friend_count": 7}
    )
    self.assertTrue(sadie.partial_save())
    serverside_sadie = users.get_item(username="******", friend_count=7, consistent=True)
    self.assertEqual(serverside_sadie["first_name"], "Sadie")

    # Test the eventually consistent query.
    results = users.query(
        username__eq="johndoe", last_name__eq="Doe", index="LastNameIndex", attributes=("username",), reverse=True
    )

    for res in results:
        self.assertTrue(res["username"] in ["johndoe"])
        self.assertEqual(res.keys(), ["username"])

    # Test the strongly consistent query.
    c_results = users.query(
        username__eq="johndoe", last_name__eq="Doe", index="LastNameIndex", reverse=True, consistent=True
    )

    for res in c_results:
        self.assertTrue(res["username"] in ["johndoe"])

    # Test scans without filters.
    all_users = users.scan(limit=7)
    self.assertEqual(all_users.next()["username"], "bob")
    self.assertEqual(all_users.next()["username"], "jane")
    self.assertEqual(all_users.next()["username"], "johndoe")

    # Test scans with a filter.
    filtered_users = users.scan(limit=2, username__beginswith="j")
    self.assertEqual(filtered_users.next()["username"], "jane")
    self.assertEqual(filtered_users.next()["username"], "johndoe")

    # Test deleting a single item.
    johndoe = users.get_item(username="******", friend_count=4)
    johndoe.delete()

    # Test the eventually consistent batch get.
    results = users.batch_get(
        keys=[{"username": "******", "friend_count": 1}, {"username": "******", "friend_count": 3}]
    )
    batch_users = []

    for res in results:
        batch_users.append(res)
        self.assertTrue(res["first_name"] in ["Bob", "Jane"])

    self.assertEqual(len(batch_users), 2)

    # Test the strongly consistent batch get.
    c_results = users.batch_get(
        keys=[{"username": "******", "friend_count": 1}, {"username": "******", "friend_count": 3}], consistent=True
    )
    c_batch_users = []

    for res in c_results:
        c_batch_users.append(res)
        self.assertTrue(res["first_name"] in ["Bob", "Jane"])

    self.assertEqual(len(c_batch_users), 2)

    # Test count, but in a weak fashion. Because lag time.
    self.assertTrue(users.count() > -1)

    # Test query count
    count = users.query_count(username__eq="bob")
    self.assertEqual(count, 1)

    # Test without LSIs (describe calls shouldn't fail).
    admins = Table.create("admins", schema=[HashKey("username")])
    self.addCleanup(admins.delete)
    time.sleep(60)
    admins.describe()
    self.assertEqual(admins.throughput["read"], 5)
    self.assertEqual(admins.indexes, [])

    # A single query term should fail on a table with *ONLY* a HashKey.
    self.assertRaises(exceptions.QueryError, admins.query, username__eq="johndoe")

    # But it shouldn't break on more complex tables.
    res = users.query(username__eq="johndoe")

    # Test putting with/without sets.
    mau5_created = users.put_item(
        data={
            "username": "******",
            "first_name": "dead",
            "last_name": "mau5",
            "friend_count": 2,
            "friends": set(["skrill", "penny"]),
        }
    )
    self.assertTrue(mau5_created)

    penny_created = users.put_item(
        data={"username": "******", "first_name": "Penny", "friend_count": 0, "friends": set([])}
    )
    self.assertTrue(penny_created)
shard_iterator = k_conn.get_shard_iterator(stream_name, shard_id, shard_iterator_type) next_iterator = shard_iterator['ShardIterator'] shard_ids.append({'shard_id' : shard_id ,'shard_iterator' : shard_iterator['ShardIterator'] }) tries = 0 result = [] while tries < 100: tries += 1 try: response = k_conn.get_records(next_iterator, limit=1) next_iterator = response['NextShardIterator'] bookmark= Table(tableName) if len(response['Records'])> 0: for res in response['Records']: dbrecord = bookmark.get_item(redShiftEndpoint=dbHost) dbrecord['next_iterator'] = next_iterator print res['Data'] dbrecord['sequenceNumber'] = res['SequenceNumber'] dbrecord.partial_save() try: with psycopg2.connect(**db_args) as conn: with conn.cursor() as curs: curs.execute(res['Data']) except: pass else : print tries except ProvisionedThroughputExceededException as ptee: print (ptee.message) time.sleep(5)