def connect(cls):
    """Open a class-level connection to DynamoDB Local and verify it is up.

    Raises EnvironmentError (chained to the underlying pynamodb error) when
    DynamoDB Local is not reachable at cls.DDB_LOCAL_HOST.
    """
    cls.connection = Connection(host=cls.DDB_LOCAL_HOST, region=cls.DDB_LOCAL_REGION)
    try:
        # Cheap round-trip to prove the endpoint is actually listening.
        cls.connection.list_tables()
    except (TableError, ConnectionError) as e:
        print(e)
        # BUG FIX: the "{}" placeholder was never filled in, so the raised
        # message literally contained "port {}".  Filled with the configured
        # host (which carries the port) — TODO confirm there is no separate
        # port constant this was meant to reference.
        raise EnvironmentError(
            "DynamoDB Local does not appear to be running on localhost, "
            "port {}! Cannot run tests!".format(cls.DDB_LOCAL_HOST)
        ) from e
def remove_from_db(self, uid: str) -> bool:
    """Delete the item keyed by *uid* from this handler's model table.

    :param uid: hash key of the item to remove.
    :return: True when the delete succeeded, False on a DeleteError.
    """
    meta = self.model.Meta
    connection = Connection(host=meta.host, region=meta.region)
    try:
        # range_key is optional: None is fine for a hash-only table.
        connection.delete_item(meta.table_name, hash_key=uid, range_key=None)
    except DeleteError:
        logger.error("failed to remove user from db", uid=uid)
        return False
    return True
def update_capacity(self, dynamo_host, dynamo_region, table_root, logger, config):
    """
    Manage the DynamoDB tables:
        - Create if the tables don't exist
        - Update the read/write capacity if they do exist and there is a
          conflict between required and current
    :param dynamo_host: optional host, for local testing
    :param dynamo_region: aws region for the table
    :param table_root: prefix for table_name (e.g. flyby)
    :param logger:
    :param config: capacity configuration consumed by return_capacity/capacity_check
    :return:
    """
    models = [BackendModel, ServiceModel, TargetGroupModel, ResolverModel]
    for model in models:
        if dynamo_host:
            model.Meta.host = dynamo_host
            conn = Connection(host=dynamo_host)
        else:
            conn = Connection(region=dynamo_region)
        # Remember the unprefixed name: the capacity config is keyed by it.
        default_table_name = model.Meta.table_name
        if not model.Meta.table_name.startswith(table_root):
            model.Meta.table_name = "{0}-{1}".format(table_root, model.Meta.table_name)
        model.Meta.region = dynamo_region
        if not model.exists():
            logger.info("Creating {} table".format(model.Meta.table_name))
            # Look the capacity up once instead of twice (was two identical calls).
            capacity = self.return_capacity(default_table_name, config)
            model.create_table(read_capacity_units=capacity['read_capacity_units'],
                               write_capacity_units=capacity['write_capacity_units'],
                               wait=True)
        else:
            table_name = model.Meta.table_name
            table_capacity = self.capacity_check(default_table_name, table_name, conn, config)
            # DynamoDB limits how many times provisioned capacity can be
            # decreased per table per day; 'decreases' counts those used.
            if not table_capacity['result'] and table_capacity['decreases'] < 4:
                conn.update_table(
                    table_name=model.Meta.table_name,
                    read_capacity_units=table_capacity['read'],
                    write_capacity_units=table_capacity['write']
                )
                logger.info("Updating {} table read/write capacity".format(model.Meta.table_name))
            elif not table_capacity['result'] and table_capacity['decreases'] >= 4:
                # BUG FIX: previously tested table_capacity['write'] >= 4,
                # comparing the desired write capacity instead of the number
                # of decreases already performed — the error branch could
                # fire or be skipped for the wrong reason.
                logger.error("Unable to decrease capacity on {} table".format(model.Meta.table_name))
def handler(event, context):
    """Lambda entry point: validate the request body and persist a Store plus
    its Employees atomically in one DynamoDB transaction.

    :param event: Lambda proxy event; the JSON request lives in event["body"].
    :param context: Lambda context (unused).
    """
    # Validation.
    # BUG FIX: the default was the dict {} — json.loads() raises TypeError on
    # a dict; the default for a missing body must be the JSON string "{}".
    request = Request(**loads(event.get("body", "{}")))
    # Business logic, running safely under a transaction; the client request
    # token makes the transaction idempotent on retries.
    with TransactWrite(connection=Connection(),
                       client_request_token=generate_uuid()) as transaction:
        transaction.save(
            Store(
                name=request.name,
                delay=request.delay,
                working_since=request.working_since.strftime("%Y-%m-%d %H:%M:%S"),
            ))
        for employee_name in request.employees:
            transaction.save(Employee(name=employee_name))
def test_signal_exception_post_signal(mock_uuid, mock_req):
    """A post-send receiver that raises must not stop dispatch: the pre-send
    receiver has already recorded the operation (the test would fail if the
    ValueError propagated out of dispatch)."""
    expected_uuid = '123-abc'
    captured = []

    def on_pre_send(sender, operation_name, table_name, req_uuid):
        captured.append((operation_name, table_name, req_uuid))

    def on_post_send(sender, operation_name, table_name, req_uuid):
        raise ValueError()

    pre_dynamodb_send.connect(on_pre_send)
    post_dynamodb_send.connect(on_post_send)
    try:
        mock_uuid.uuid4.return_value = expected_uuid
        mock_req.return_value = {
            'TableDescription': {'TableName': 'table', 'TableStatus': 'Creating'}
        }
        connection = Connection()
        connection.dispatch('CreateTable', {'TableName': 'MyTable'})
        assert captured[0] == ('CreateTable', 'MyTable', expected_uuid)
    finally:
        # Always detach the receivers so other tests are unaffected.
        pre_dynamodb_send.disconnect(on_pre_send)
        post_dynamodb_send.disconnect(on_post_send)
def test_commit(self, mocker):
    """Committing a TransactGet forwards the collected items, serialized as a
    DynamoDB Get payload, to Connection.transact_get_items."""
    connection = Connection()
    transact_get_spy = mocker.patch.object(connection, 'transact_get_items')

    with patch(PATCH_METHOD) as req:
        req.return_value = MOCK_TABLE_DESCRIPTOR
        with TransactGet(connection=connection) as transaction:
            transaction.get(MockModel, 1, 2)

    expected_key = {
        'MockHash': {'N': '1'},
        'MockRange': {'N': '2'},
    }
    transact_get_spy.assert_called_once_with(
        get_items=[{'Key': expected_key, 'TableName': 'mock'}],
        return_consumed_capacity=None)
# https://pynamodb.readthedocs.io/en/latest/low_level.html # Creating a connection # Creating a connection is simple from pynamodb.connection import Connection conn = Connection() # You can specify a different DynamoDB url conn = Connection(host='http://alternative-domain/') # By default, PynamoDB will connect to the us-east-1 region, but you can specify a different one. conn = Connection(region='us-west-1') # Modifying tables # You can easily list tables: conn.list_tables() # or delete a table: # conn.delete_table('Thread') # If you want to change the capacity of a table, that can be done as well: conn.update_table('Thread', read_capacity_units=20, write_capacity_units=20) # You can create tables as well, although the syntax is verbose. You should really use the model API instead, # but here is a low level example to demonstrate the point: kwargs = {
def test_condition_check__no_condition(self):
    """condition_check demands an actual condition: passing None raises TypeError."""
    with pytest.raises(TypeError), TransactWrite(connection=Connection()) as txn:
        txn.condition_check(MockModel, hash_key=1, condition=None)
def test_commit__not_implemented(self):
    """The abstract Transaction base leaves _commit to its subclasses."""
    transaction = Transaction(connection=Connection())
    with pytest.raises(NotImplementedError):
        transaction._commit()
import os
import boto3
import urllib
import pyexcel as pe
from pynamodb.connection import Connection
# database permit model
from models import PermitsModel
# Our created utils
from utils import (write_records, create_permit, adjust_capacity)

# Module-level clients are created once per Lambda container and reused
# across invocations.
s3 = boto3.client('s3')
Permits = PermitsModel('eu-central-1', 'https://dynamodb.eu-central-1.amazonaws.com')
conn = Connection(region='eu-central-1', host='https://dynamodb.eu-central-1.amazonaws.com')


def lambda_handler(event, context):
    # S3 PUT-triggered entry point.
    # NOTE(review): urllib.unquote_plus and str.encode('utf8') on the event
    # strings are Python 2 idioms — this handler will not run unchanged on
    # Python 3 (urllib.parse.unquote_plus). Confirm the target runtime.
    # NOTE(review): dynamo_table and report_file are not used in the portion
    # of the handler visible here; the body appears truncated in this view.
    # get table name from CloudFormation template
    dynamo_table = os.environ['TableName']
    # set temp file
    # '/tmp' is directory to write to inside Lambda function container
    report_file = '/tmp/report.xlsx'
    # Get bucket and key from PUT event
    bucket = event['Records'][0]['s3']['bucket']['name'].encode('utf8')
    key = urllib.unquote_plus(
        event['Records'][0]['s3']['object']['key'].encode('utf8'))
def test_connection_integration(ddb_url):
    """End-to-end exercise of the low-level Connection API against a live
    (or local/fake) DynamoDB endpoint: create table, wait for ACTIVE, then
    put/get/delete, batch write/get, query, scan, and finally drop the table.

    :param ddb_url: endpoint URL of the DynamoDB instance under test.
    """
    table_name = 'pynamodb-ci-connection'
    # For use with a fake dynamodb connection
    # See: http://aws.amazon.com/dynamodb/developer-resources/
    conn = Connection(host=ddb_url)
    print(conn)
    print("conn.describe_table...")
    table = None
    try:
        table = conn.describe_table(table_name)
    except TableDoesNotExist:
        # Table is missing: build the full creation payload — hash+range
        # schema, one GSI and one LSI (both KEYS_ONLY projections).
        params = {
            'read_capacity_units': 1,
            'write_capacity_units': 1,
            'attribute_definitions': [
                {'attribute_type': STRING, 'attribute_name': 'Forum'},
                {'attribute_type': STRING, 'attribute_name': 'Thread'},
                {'attribute_type': STRING, 'attribute_name': 'AltKey'},
                {'attribute_type': NUMBER, 'attribute_name': 'number'}
            ],
            'key_schema': [
                {'key_type': HASH, 'attribute_name': 'Forum'},
                {'key_type': RANGE, 'attribute_name': 'Thread'}
            ],
            'global_secondary_indexes': [{
                'index_name': 'alt-index',
                'key_schema': [{'KeyType': 'HASH', 'AttributeName': 'AltKey'}],
                'projection': {'ProjectionType': 'KEYS_ONLY'},
                'provisioned_throughput': {
                    'ReadCapacityUnits': 1,
                    'WriteCapacityUnits': 1,
                }
            }],
            'local_secondary_indexes': [{
                'index_name': 'view-index',
                'key_schema': [
                    {'KeyType': 'HASH', 'AttributeName': 'Forum'},
                    {'KeyType': 'RANGE', 'AttributeName': 'AltKey'}
                ],
                'projection': {'ProjectionType': 'KEYS_ONLY'}
            }]
        }
        print("conn.create_table...")
        conn.create_table(table_name, **params)
    # Poll until the table description is available...
    while table is None:
        time.sleep(1)
        table = conn.describe_table(table_name)
    # ...and until it has left the CREATING state.
    while table['TableStatus'] == 'CREATING':
        time.sleep(2)
        table = conn.describe_table(table_name)
    print("conn.list_tables")
    conn.list_tables()
    print("conn.update_table...")
    # Bump read capacity by one to exercise UpdateTable, then wait for ACTIVE.
    conn.update_table(table_name,
                      read_capacity_units=table.get(PROVISIONED_THROUGHPUT).get(READ_CAPACITY_UNITS) + 1,
                      write_capacity_units=2)
    table = conn.describe_table(table_name)
    while table['TableStatus'] != 'ACTIVE':
        time.sleep(2)
        table = conn.describe_table(table_name)
    # Single-item round trip, guarded by a not-exists condition on put.
    print("conn.put_item")
    conn.put_item(
        table_name,
        'item1-hash',
        range_key='item1-range',
        attributes={'foo': {'S': 'bar'}},
        condition=NotExists(Path('Forum')),
    )
    conn.get_item(table_name, 'item1-hash', range_key='item1-range')
    conn.delete_item(table_name, 'item1-hash', range_key='item1-range')
    # Batch round trip with ten items under one hash key.
    items = []
    for i in range(10):
        items.append({"Forum": "FooForum", "Thread": f"thread-{i}"})
    print("conn.batch_write_items...")
    conn.batch_write_item(table_name, put_items=items)
    print("conn.batch_get_items...")
    # NOTE(review): 'data' is never asserted on — the call only checks that
    # batch_get_item does not raise.
    data = conn.batch_get_item(table_name, items)
    print("conn.query...")
    conn.query(
        table_name,
        "FooForum",
        range_key_condition=(BeginsWith(Path('Thread'), Value('thread'))),
    )
    print("conn.scan...")
    conn.scan(table_name, )
    # Clean up after ourselves.
    print("conn.delete_table...")
    conn.delete_table(table_name)
# AWS Lambda if (os.environ.get("LAMBDA_TASK_ROOT", None) is not None # Running inside Lambda and bool(os.getenv("AWS_SAM_LOCAL")) is False # But not on SAM local ): return None # SAM Local # `dynamodb` is the service name inside # docker-compose.yml return "http://dynamodb:8000" host = get_nosql_database_url() connection = (Connection(host=get_nosql_database_url()) if host else Connection(region=os.environ["AWS_REGION"])) logger.debug(f"Session Registry created for {connection}") class Base(Model): """Base relational class. Use this class to add functionality to PynamoDB Base class. """ created_at = UTCDateTimeAttribute(default=get_time_now) updated_at = UTCDateTimeAttribute(default=get_time_now) cls = DiscriminatorAttribute()
def updateOrder(request):
    """Django view: for each record returned by a DynamoDB query on the
    request's shop_domain, drives a Selenium Chrome session through a
    Hacker News login, one random upvote, and one canned comment, then logs
    out. Returns a JSON status response.

    NOTE(review): item['password']['S'] implies credentials are stored in
    DynamoDB in plain text — a serious security problem; use a secrets store
    and hashed/encrypted storage.
    NOTE(review): automating votes and comments on a third-party site
    violates that site's terms of service; this behavior should be
    reconsidered, not just refactored.
    NOTE(review): find_element_by_* APIs were removed in Selenium 4 —
    presumably this targets Selenium 3; confirm the pinned version.
    """
    body = request.body.decode('utf-8')
    params = json.loads(body)
    shop_domain = params.get('shop_domain', 'nothing')
    order_status = params.get('order_status', 'nothing')  #use in future
    is_digital = params.get('is_digital', False)  #use in future
    shipping_company = params.get('shipping_company', 'nothing')  #use in future
    tracking_no = params.get('tracking_no', 'nothing')  #use in future
    print(shop_domain)
    conn = Connection(region='us-east-1')
    table = conn.list_tables()
    # Query the 'updateOrder' table with shop_domain as the hash key.
    personal_data = conn.query('updateOrder', shop_domain)
    if personal_data == {}:
        data = {'status': 'Failure'}
        dump = json.dumps(data)
        return HttpResponse(dump, content_type='application/json')
    print(personal_data)
    for item in personal_data['Items']:
        print(item['username']['S'])
        # Index of the comment link to click on the front page.
        comment_numper = 4
        driver = webdriver.Chrome('chromedriver')
        driver.get("https://news.ycombinator.com/news")
        driver.maximize_window()
        time.sleep(1)
        elem = driver.find_element_by_xpath(
            '//a[@href="login?goto=news"]'
        )  # driver.find_element_by_link_text("login")
        ActionChains(driver).move_to_element(elem).click().perform()
        # Fill in the login form from the stored credentials.
        elem = driver.find_element_by_name("acct")
        ActionChains(driver).move_to_element(elem).click().perform()
        ActionChains(driver).send_keys(item['username']['S']).perform()
        elem = driver.find_element_by_name("pw")
        ActionChains(driver).move_to_element(elem).click().perform()
        ActionChains(driver).send_keys(item['password']['S']).perform()
        ActionChains(driver).send_keys(Keys.RETURN).perform()
        driver.get("https://news.ycombinator.com/news")
        time.sleep(1)
        # Click a random upvote arrow on the front page.
        upvote_elems = driver.find_elements_by_class_name("votearrow")
        ActionChains(driver).move_to_element(upvote_elems[random.randint(
            0, len(upvote_elems) - 1)]).click().perform()
        time.sleep(1)
        # Open a comment thread and post the canned comment.
        comment_elems = driver.find_elements_by_partial_link_text("comment")
        ActionChains(driver).move_to_element(
            comment_elems[int(comment_numper)]).click().perform()
        write_comment_element = driver.find_element_by_xpath(
            '//textarea[@name="text"]')
        ActionChains(driver).move_to_element(
            write_comment_element).click().perform()
        ActionChains(driver).send_keys(
            'This is one of the best test comments ever. Its bigly.').perform(
            )
        submit_element = driver.find_element_by_xpath(
            '//input[@type="submit"]')
        ActionChains(driver).move_to_element(submit_element).click().perform()
        time.sleep(1)
        driver.get("https://news.ycombinator.com/news")
        logout_elem = driver.find_element_by_id("logout")
        ActionChains(driver).move_to_element(logout_elem).click().perform()
        driver.close()
    # NOTE(review): loop/return nesting reconstructed from a collapsed
    # source line — confirm the success response sits outside the for loop.
    data = {'status': 'Success'}
    dump = json.dumps(data)
    return HttpResponse(dump, content_type='application/json')
def connection(ddb_url):
    """Yield a pynamodb Connection bound to the given DynamoDB endpoint."""
    conn = Connection(host=ddb_url)
    yield conn
def test_commit(self, mocker):
    """Commit a TransactWrite containing all four operation kinds
    (condition_check, delete, save, update) and verify the exact payload
    handed to Connection.transact_write_items — including the
    attribute_not_exists conditions that appear to come from a version
    attribute on MockModel (presumably optimistic locking; confirm against
    the model definition)."""
    connection = Connection()
    # Stub the low-level transact call so no request leaves the process.
    mock_connection_transact_write = mocker.patch.object(
        connection, 'transact_write_items')
    with patch(PATCH_METHOD) as req:
        req.return_value = MOCK_TABLE_DESCRIPTOR
        with TransactWrite(connection=connection) as t:
            t.condition_check(
                MockModel, 1, 3,
                condition=(MockModel.mock_hash.does_not_exist()))
            t.delete(MockModel(2, 4))
            t.save(MockModel(3, 5))
            t.update(MockModel(4, 6),
                     actions=[MockModel.mock_toot.set('hello')],
                     return_values='ALL_OLD')
    # Expected serialized payloads, one list per operation kind.
    expected_condition_checks = [{
        'ConditionExpression': 'attribute_not_exists (#0)',
        'ExpressionAttributeNames': {'#0': 'mock_hash'},
        'Key': {'MockHash': {'N': '1'}, 'MockRange': {'N': '3'}},
        'TableName': 'mock'
    }]
    expected_deletes = [{
        'ConditionExpression': 'attribute_not_exists (#0)',
        'ExpressionAttributeNames': {'#0': 'mock_version'},
        'Key': {'MockHash': {'N': '2'}, 'MockRange': {'N': '4'}},
        'TableName': 'mock'
    }]
    expected_puts = [{
        'ConditionExpression': 'attribute_not_exists (#0)',
        'ExpressionAttributeNames': {'#0': 'mock_version'},
        'Item': {'MockHash': {'N': '3'}, 'MockRange': {'N': '5'},
                 'mock_version': {'N': '1'}},
        'TableName': 'mock'
    }]
    expected_updates = [{
        'ConditionExpression': 'attribute_not_exists (#0)',
        'TableName': 'mock',
        'Key': {'MockHash': {'N': '4'}, 'MockRange': {'N': '6'}},
        'ReturnValuesOnConditionCheckFailure': 'ALL_OLD',
        'UpdateExpression': 'SET #1 = :0, #0 = :1',
        'ExpressionAttributeNames': {'#0': 'mock_version',
                                     '#1': 'mock_toot'},
        'ExpressionAttributeValues': {':0': {'S': 'hello'},
                                      ':1': {'N': '1'}}
    }]
    mock_connection_transact_write.assert_called_once_with(
        condition_check_items=expected_condition_checks,
        delete_items=expected_deletes,
        put_items=expected_puts,
        update_items=expected_updates,
        client_request_token=None,
        return_consumed_capacity=None,
        return_item_collection_metrics=None)
import random
from django.utils import timezone
from django.conf import settings
from organisations.serializers import OrganisationSerializer, ProjectSerializer, EmployeeSerializer, \
    ProjectEmployeeSerializer
from organisations.models import Organisation, Project, Employee
from faker import Faker
from pynamodb.connection import Connection

# Module-level connection to a local DynamoDB instance plus a Faker
# generator, shared by the data-generation helpers below.
conn = Connection(host='http://localhost:8080', region='localhost')
fake = Faker()


def generate_organisations():
    """Create 40 organisations with fake names/tiers via the serializer.

    Invalid serializer data is silently skipped (no else branch).
    """
    for _ in range(40):
        print("Generating organisation")
        serializer = OrganisationSerializer(
            data=dict(name=fake.word(), tier=fake.word()))
        if serializer.is_valid():
            serializer.create(serializer.validated_data)


def update_organisation_tiers():
    # Scan all organisations and (presumably) reassign a random tier to each;
    # NOTE(review): the rest of this function appears truncated in this view.
    organisations = Organisation.scan()
    tiers = ['tier_one', 'tier_two', 'tier_three']
office_out_of_date.employees.remove(garrett) with assert_condition_check_fails(): office_out_of_date.save() # After refreshing the local copy the operation will succeed. office_out_of_date.refresh() office_out_of_date.employees.remove(garrett) office_out_of_date.save() assert office_out_of_date.version == 3 # Condition check fails for delete. with assert_condition_check_fails(): office.delete() # Example failed transactions. connection = Connection(host='http://dynamodb:8000') with assert_condition_check_fails(), TransactWrite( connection=connection) as transaction: transaction.save(Office(office.office_id, name='newer name', employees=[])) with assert_condition_check_fails(), TransactWrite( connection=connection) as transaction: transaction.update(Office(office.office_id, name='newer name', employees=[]), actions=[ Office.name.set('Newer Office Name'), ]) with assert_condition_check_fails(), TransactWrite(
def list_table():
    """Print and return the list of tables at the configured DynamoDB endpoint."""
    from pynamodb.connection import Connection

    result = Connection(host=DB_HOST, region=REGION).list_tables()
    print(result)
    return result
def test_commit_auto_version_condition(self, mocker):
    """Drive _handle_transact_write with the auto-version-condition flag set
    to False and verify the payload given to Connection.transact_write_items:
    unlike the explicit-condition variant, deletes and puts here carry no
    ConditionExpression, while the update increments mock_version via ADD
    (presumably the model's version attribute; confirm against MockModel)."""
    connection = Connection()
    # Stub the low-level transact call so nothing is sent over the wire.
    mock_connection_transact_write = mocker.patch.object(
        connection, 'transact_write_items')
    # Shared helper performs the condition_check/delete/save/update sequence.
    self._handle_transact_write(connection, False)
    # Expected serialized payloads, one list per operation kind.
    expected_condition_checks = [{
        'ConditionExpression': 'attribute_not_exists (#0)',
        'ExpressionAttributeNames': {'#0': 'mock_hash'},
        'Key': {'MockHash': {'N': '1'}, 'MockRange': {'N': '3'}},
        'TableName': 'mock'
    }]
    expected_deletes = [{
        'Key': {'MockHash': {'N': '2'}, 'MockRange': {'N': '4'}},
        'TableName': 'mock'
    }]
    expected_puts = [{
        'Item': {'MockHash': {'N': '3'}, 'MockRange': {'N': '5'},
                 'mock_version': {'N': '1'}},
        'TableName': 'mock'
    }]
    expected_updates = [{
        'TableName': 'mock',
        'Key': {'MockHash': {'N': '4'}, 'MockRange': {'N': '6'}},
        'ReturnValuesOnConditionCheckFailure': 'ALL_OLD',
        'UpdateExpression': 'SET #0 = :0 ADD #1 :1',
        'ExpressionAttributeNames': {'#0': 'mock_toot',
                                     '#1': 'mock_version'},
        'ExpressionAttributeValues': {':0': {'S': 'hello'},
                                      ':1': {'N': '1'}}
    }]
    mock_connection_transact_write.assert_called_once_with(
        condition_check_items=expected_condition_checks,
        delete_items=expected_deletes,
        put_items=expected_puts,
        update_items=expected_updates,
        client_request_token=None,
        return_consumed_capacity=None,
        return_item_collection_metrics=None)
""" Examples using a connection """ from pynamodb.connection import Connection # Get a connection conn = Connection(host='http://localhost:8000') print(conn) # List tables print(conn.list_tables()) # Describe a table print(conn.describe_table('Thread')) # Get an item print(conn.get_item('Thread', 'hash-key', 'range-key')) # Put an item conn.put_item('Thread', 'hash-key', 'range-key', attributes={ 'forum_name': 'value', 'subject': 'value' }) # Delete an item conn.delete_item('Thread', 'hash-key', 'range-key')
""" Runs tests against dynamodb """ from __future__ import print_function import time import config as cfg from pynamodb.connection import Connection from pynamodb.constants import PROVISIONED_THROUGHPUT, READ_CAPACITY_UNITS from pynamodb.types import STRING, HASH, RANGE, NUMBER table_name = 'pynamodb-ci' # For use with a fake dynamodb connection # See: http://aws.amazon.com/dynamodb/developer-resources/ conn = Connection(host=cfg.DYNAMODB_HOST) print(conn) print("conn.describe_table...") table = conn.describe_table(table_name) if table is None: params = { 'read_capacity_units': 1, 'write_capacity_units': 1, 'attribute_definitions': [{ 'attribute_type': STRING, 'attribute_name': 'Forum' }, { 'attribute_type': STRING, 'attribute_name': 'Thread'