def create_mockinstance_doomed(monkeypatch):
    """Create a localstack EC2 instance whose Timeout tags are already expired.

    The instance and its volume are tagged ``Timeout = -1`` so a
    timeout-based reaper treats the instance as doomed immediately.
    Patches ``ec2_rogue_killer.ec2_client`` to use the localstack client,
    yields the ``create_instances`` output, and terminates the instance on
    teardown.
    """
    session = localstack_client.session.Session()
    ec2_resource = session.resource('ec2')
    ec2_client = session.client('ec2')
    # Reuse the client built above for the module under test instead of
    # constructing a second, identical client (original created two).
    monkeypatch.setattr(ec2_rogue_killer, 'ec2_client', ec2_client)
    output = ec2_resource.create_instances(
        ImageId='garbage',
        InstanceType='t2.micro',
        MinCount=1,
        MaxCount=1,
        TagSpecifications=[
            {'ResourceType': 'volume',
             'Tags': [{'Key': 'Timeout', 'Value': str(-1)}]},
            {'ResourceType': 'instance',
             'Tags': [{'Key': 'Timeout', 'Value': str(-1)}]}
        ])
    yield output
    # Teardown: terminate the instance created above.
    instance_id = output[0].instance_id
    ec2_client.terminate_instances(InstanceIds=[instance_id])
def create_mockinstance_ssm(monkeypatch):
    """Create an untagged localstack instance with a running SSM command,
    mimicking a deployment instance.

    Yields ``(output, mockfunc)``: *output* is the ``create_instances``
    result; *mockfunc(instance_info)* is True for any instance other than
    the one created here.  The instance is terminated on teardown.
    """
    session = localstack_client.session.Session()
    ec2_resource = session.resource('ec2')
    ec2_client = session.client('ec2')
    monkeypatch.setattr(ec2_rogue_killer, 'ec2_client', session.client('ec2'))
    output = ec2_resource.create_instances(ImageId='garbage',
                                           InstanceType='t2.micro',
                                           MinCount=1,
                                           MaxCount=1)
    created_id = output[0].instance_id
    # Start a long-running shell command via the module-level SSM client so
    # the instance looks busy, like a real deployment instance.
    ssm_client_local.send_command(DocumentName='AWS-RunShellScript',
                                  InstanceIds=[created_id],
                                  Parameters={'commands': ['sleep 100; sleep 10'],
                                              'executionTimeout': [str(3600)]})

    def mockfunc(instance_info):
        # True for every instance except the one this fixture created.
        return created_id != instance_info['InstanceId']

    yield (output, mockfunc)
    ec2_client.terminate_instances(InstanceIds=[created_id])
def create_mockinstance(monkeypatch):
    """Create a localstack instance tagged ``Timeout = 100`` for reading and
    modifying tags.

    BEWARE: as this spins up a localstack instance, it checks function
    only, not permissions.
    """
    session = localstack_client.session.Session()
    ec2_resource = session.resource('ec2')
    ec2_client = session.client('ec2')
    monkeypatch.setattr(ec2_rogue_killer, 'ec2_client', session.client('ec2'))
    # Tag both the instance and its volume with the same Timeout value.
    timeout_tag = [{'Key': 'Timeout', 'Value': str(100)}]
    tag_specs = [{'ResourceType': rtype, 'Tags': timeout_tag}
                 for rtype in ('volume', 'instance')]
    output = ec2_resource.create_instances(ImageId='garbage',
                                           InstanceType='t2.micro',
                                           MinCount=1,
                                           MaxCount=1,
                                           TagSpecifications=tag_specs)
    yield output
    # Teardown: terminate the instance created above.
    ec2_client.terminate_instances(InstanceIds=[output[0].instance_id])
def create_mockinstance_untagged(monkeypatch):
    """Create an untagged localstack instance for tag get/modify tests.

    BEWARE: as this spins up a localstack instance, it checks function
    only, not permissions.
    """
    session = localstack_client.session.Session()
    ec2_resource = session.resource('ec2')
    ec2_client = session.client('ec2')
    monkeypatch.setattr(ec2_rogue_killer, 'ec2_client', session.client('ec2'))
    output = ec2_resource.create_instances(ImageId='garbage',
                                           InstanceType='t2.micro',
                                           MinCount=1,
                                           MaxCount=1)
    yield output
    # Teardown: terminate the instance created above.
    ec2_client.terminate_instances(InstanceIds=[output[0].instance_id])
def mock_resources(monkeypatch):
    """Patch every utilsparam module to use localstack-backed AWS clients.

    Covers s3, ssm, ec2, events and pricing (the pricing module reads EC2
    data through its ``ec2client`` attribute).
    """
    # NOTE(review): carried over from the original TODO — these setattr
    # patches may not be scoped correctly without a context manager.
    patches = [
        (s3, "s3_client", session.client("s3")),
        (s3, "s3_resource", session.resource("s3")),
        (ssm, "ssm_client", session.client("ssm")),
        (ec2, "ec2_resource", session.resource("ec2")),
        (ec2, "ec2_client", session.client("ec2")),
        (events, "events", session.client("events")),
        (pricing, "ec2client", session.client("ec2")),
    ]
    for module, attr, replacement in patches:
        monkeypatch.setattr(module, attr, replacement)
def make_bucket(s3_connection=None, bucket_name=None, acl='public-read'):
    """Create *bucket_name* with the given ACL.

    Falls back to a module-level session client when *s3_connection* is
    not supplied.  Returns the service response, or None after logging if
    anything goes wrong (best-effort, matching the original contract).
    """
    try:
        connection = s3_connection or session.client('s3')
        return connection.create_bucket(Bucket=bucket_name, ACL=acl)
    except Exception:
        logging.error(f'Error creating the bucket - {bucket_name}', exc_info=True)
def test_lambda_handler(sqs_ingest_data_event):
    """Invoke the handler with the JSON-loaded SQS event fixture."""
    # Setup: point the handler's SQS client at localstack.
    local_session = localstack_client.session.Session()
    function.sqs_client = local_session.client('sqs')
    # Act: an empty string stands in for the lambda context object.
    function.lambda_handler(sqs_ingest_data_event, "")
    # Assert: reaching this point without raising is the success criterion.
    assert 1 == 1
def test_user_policy(tags, responsecodes):
    """Dry-run an EC2 launch under the ``testdev`` profile and check the error code.

    ``DryRun=True`` makes AWS validate permissions without launching, so
    the call raises ClientError; its error code must equal *responsecodes*.
    (The original also built an unused ``ec2_client`` — removed.)
    """
    session = boto3.Session(profile_name="testdev")
    ec2_resource = session.resource("ec2")
    try:
        ec2_resource.create_instances(ImageId="ami-07ebfd5b3428b6f4d",
                                      InstanceType="t2.micro",
                                      MinCount=1,
                                      MaxCount=1,
                                      DryRun=True,
                                      TagSpecifications=tags)
    except ClientError as e:
        assert e.response["Error"]["Code"] == responsecodes
def init():
    """Initialize the environment for development and test.

    Creates the client and JSON buckets, then uploads the sample XML file
    from ``src/uploads`` into the client bucket.
    """
    s3_client = session.client('s3')
    s3_resource = session.resource('s3')
    make_bucket(s3_client, CLIENT_S3_BUCKET)
    make_bucket(s3_client, JSON_S3_BUCKET)
    file_key = 'test-file.xml'
    # Use a context manager so the file handle is closed; the original
    # opened the file and never closed it.
    with open(f'src/uploads/{file_key}', "r") as fh:
        upload_file_to_bucket(s3_connection=s3_resource,
                              bucket_name=CLIENT_S3_BUCKET,
                              file_key=file_key,
                              data=fh.read())
def create_lambda(monkeypatch):
    """Set up the module to use localstack, and create a lambda function in
    localstack called test-lambda.  Source code taken from
    ./test_mats/testmainlambda.zip.
    """
    session = localstack_client.session.Session()
    lambda_client = session.client("lambda")
    # NOTE: the original also called session.resource("lambda"); boto3 has
    # no "lambda" resource type and the value was never used, so the call
    # is removed.
    lambda_client.create_function(
        FunctionName="test-lambda",
        Runtime='python3.6',
        Role='todo',
        Handler='submit_start.handler',
        Description="test Lambda Function for Serverless",
        MemorySize=128,
    )
def create_mockinstance(monkeypatch):
    """Create a localstack instance tagged ``Timeout = timeout_init`` for
    reading and modifying tags.

    BEWARE: as this spins up a localstack instance, it checks function
    only, not permissions.
    """
    session = localstack_client.session.Session()
    ec2_resource = session.resource("ec2")
    ec2_client = session.client("ec2")
    # Point the blueprint module under test at the localstack session.
    monkeypatch.setattr(develop_blueprint, "ec2_resource", session.resource("ec2"))
    monkeypatch.setattr(develop_blueprint, "ec2_client", session.client("ec2"))
    # Tag both the volume and the instance with the same Timeout value.
    timeout_tag = [{"Key": "Timeout", "Value": str(timeout_init)}]
    tag_specs = [{"ResourceType": rtype, "Tags": timeout_tag}
                 for rtype in ("volume", "instance")]
    output = ec2_resource.create_instances(ImageId="garbage",
                                           InstanceType="t2.micro",
                                           MinCount=1,
                                           MaxCount=1,
                                           TagSpecifications=tag_specs)
    yield output
    # Teardown: terminate the instance created above.
    ec2_client.terminate_instances(InstanceIds=[output[0].instance_id])
# LocalStack https://github.com/localstack/localstack # LocalStack Client https://github.com/localstack/localstack-python-client # boto3 https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html import localstack_client.session import time import json import random session = localstack_client.session.Session() sqs = session.client('sqs') queue = sqs.create_queue( QueueName = 'digester', Attributes = { 'FifoQueue': 'false' } ) sqs.purge_queue( QueueUrl = queue['QueueUrl'] ) counter = 1 types = ['Type1', 'Type2', 'Type3', 'Type4', 'Type5'] while True: message = { 'id': counter,
def test_protected_client_kwargs_not_passed():
    """Protected kwargs (e.g. region_name) must not be overwritten in
    boto3.client creation — localstack_client pins its own region."""
    session = localstack_client.session.Session()
    kwargs = {'region_name': 'another_region'}
    sqs = session.client('sqs', **kwargs)
    # Idiom fix: `!=` instead of the original's `not ... ==`.
    assert sqs.meta.region_name != 'another_region'
def test_client_kwargs_passed():
    """Unprotected kwargs flow through to boto3.client creation."""
    config = Config(signature_version='s3v4')
    session = localstack_client.session.Session()
    sqs = session.client('sqs', config=config)
    assert sqs.meta.config.signature_version == 's3v4'
from ncap_iac.protocols.utilsparam import s3, ssm, ec2, events, pricing
import pytest
from test_submit_start import setup_lambda_env, get_paths, user_name
import os

# Paths to on-disk test materials (job log folder used to seed the bucket).
here = os.path.abspath(os.path.dirname(__file__))
test_log_mats = os.path.join(here, "test_mats", "logfolder")
# Bucket and submission-key constants used by the fixtures below.
bucket_name = "cianalysispermastack"
key_name = "test_user/submissions/submit.json"
fakedatakey_name = "test_user/submissions/fakedatasubmit.json"
fakeconfigkey_name = "test_user/submissions/fakeconfigsubmit.json"
notimestampkey_name = "test_user/submissions/notimestampconfigsubmit.json"
# NOTE(review): the next two constants reuse the *notimestamp* filename —
# this looks like a copy-paste slip (expected nodatasubmit.json /
# noconfigsubmit.json?); confirm the intended filenames.
nodatakey_name = "test_user/submissions/notimestampconfigsubmit.json"
noconfigkey_name = "test_user/submissions/notimestampconfigsubmit.json"
## set up mock client and resources.
# `session` is expected to be a localstack session defined elsewhere in
# this file — TODO confirm.
s3_client = session.client("s3")
s3_resource = session.resource("s3")
## The lambda function logic we use is monitor_updater.
@pytest.fixture()
def mock_resources(monkeypatch):
    """Patch the utilsparam modules to use localstack-backed AWS clients."""
    ## mock s3 resources:
    monkeypatch.setattr( s3, "s3_client", session.client("s3") )
    ## TODO I don't think these are scoped correctly w/o a context manager.
    monkeypatch.setattr(s3, "s3_resource", session.resource("s3"))
    ## mock ssm resources:
    monkeypatch.setattr(ssm, "ssm_client", session.client("ssm"))
    ## mock ec2 resources:
    # (this chunk may be truncated here — the fixture continues elsewhere)
    monkeypatch.setattr(ec2, "ec2_resource", session.resource("ec2"))
def make_local_client():
    """Return a localstack-backed Secrets Manager client."""
    local_session = localstack_client.session.Session()
    return local_session.client('secretsmanager')
'exemptlist': 'i-029339d0ff4fa4318,i-0c366d0e991bc6fde,i-04a5f4794bafda3b1,i-0a7b60fe2661444da', 'dryrun': '1', 'topicarn': 'arn:aws:sns:us-east-1:739988523141:EC2_Instance_Sweeper', 'localstack': '1', 'AWS_ACCCESS_KEY_ID': 'foo', 'AWS_SECRET_ACCESS_KEY': 'bar', 'LOCALSTACK_HOSTNAME': 'localhost' } for var in env_vars.items(): os.environ[var[0]] = var[1] import ncap_iac.permissions.management_lambdas.ec2_rogue_killer as ec2_rogue_killer ## this is eseentially the function ec2_rogue_killer session = localstack_client.session.Session() ec2_resource_local = session.resource('ec2') ec2_client_local = session.client('ec2') ssm_client_local = session.client('ssm') @pytest.fixture() def set_ssm_exempt(monkeypatch): monkeypatch.setattr(ec2_rogue_killer, 'ssm_client', ssm_client_local) ssm_client_local.put_parameter(Name='exempt_instances', Type='String', Overwrite=True, Value=(env_vars['exemptlist'])) yield 'values' ssm_client_local.delete_parameter(Name='exempt_instances') @pytest.fixture()
import pytest import time import localstack_client.session import boto3 from botocore.exceptions import ClientError import ncap_iac.ncap_blueprints.dev_utils.develop_blueprint as develop_blueprint import os timeout_init = 5 real_test_instance = "i-0ce3833f4cce8fcdf" session = localstack_client.session.Session() ec2_resource_local = session.resource("ec2") ec2_client_local = session.client("ec2") @pytest.fixture() def use_devcred(monkeypatch): monkeypatch.setattr(develop_blueprint, "ec2_resource", boto3.Session(profile_name="testdev").resource("ec2")) monkeypatch.setattr(develop_blueprint, "ec2_client", boto3.Session(profile_name="testdev").client("ec2")) loc = os.path.dirname(os.path.abspath(__file__)) fixturetemplate = os.path.join(loc, "fixtures") @pytest.fixture() def create_realinstance(monkeypatch): """Creates a real instance for the purpose of getting and modifying tags
def setup_testing_bucket(monkeypatch):
    """Set up a localstack bucket called cianalysispermastack.

    Seeds the bucket (if it does not already contain the expected keys)
    with the following directory structure::

        /
        |-test_user
          |-inputs
            |-data.json
          |-configs
            |-config.json
          |-submissions
            |-submit.json
        |-logs
          |-active
            |-i-1234567890abcdef0.json
            |-i-superexpensive.json
          |-test_user
            |-joblog1
            |-joblog2
            ...

    Yields ``(bucket_name, submit_key)`` for the seeded submit file.
    """
    # Mapping: key suffix (under the user prefix) -> JSON content to write.
    # NOTE(review): if nodatakey_name, noconfigkey_name and
    # notimestampkey_name resolve to the same basename (as this file's
    # constants suggest), their entries overwrite one another in this dict
    # — confirm the intended filenames.
    subkeys = {
        "inputs/data1.json": { "data": "value" },
        "inputs/data2.json": { "data": "value" },
        "configs/config.json": { "param": "p1" },
        "configs/fullconfig.json": { "param": "p1", "__duration__": 360, "__dataset_size__": 20, "ensemble_size": 5 },
        "submissions/singlesubmit.json": {
            "dataname": os.path.join(user_name, "inputs", "data1.json"),
            "configname": os.path.join(user_name, "configs", "config.json"),
            "timestamp": "testtimestamp"
        },
        "submissions/submit.json": {
            "dataname": [ os.path.join(user_name, "inputs", d) for d in ["data1.json", "data2.json"] ],
            "configname": os.path.join(user_name, "configs", "config.json"),
            "timestamp": "testtimestamp"
        },
        # Submission referencing data files that do not exist in the bucket.
        "submissions/{}".format(os.path.basename(fakedatakey_name)): {
            "dataname": [ os.path.join(user_name, "inputs", d) for d in ["data21.json", "data22.json"] ],
            "configname": os.path.join(user_name, "configs", "config.json"),
            "timestamp": "testtimestamp"
        },
        # Submission referencing a config file that does not exist.
        "submissions/{}".format(os.path.basename(fakeconfigkey_name)): {
            "dataname": [ os.path.join(user_name, "inputs", d) for d in ["data1.json", "data2.json"] ],
            "configname": os.path.join(user_name, "configs", "config22.json"),
            "timestamp": "testtimestamp"
        },
        # Submission with no "dataname" field.
        "submissions/{}".format(os.path.basename(nodatakey_name)): {
            "configname": os.path.join(user_name, "configs", "config22.json"),
            "timestamp": "testtimestamp"
        },
        # Submission with no "configname" field.
        "submissions/{}".format(os.path.basename(noconfigkey_name)): {
            "dataname": [ os.path.join(user_name, "inputs", d) for d in ["data1.json", "data2.json"] ],
            "timestamp": "testtimestamp"
        },
        # Submission with no "timestamp" field.
        "submissions/{}".format(os.path.basename(notimestampkey_name)): {
            "dataname": [ os.path.join(user_name, "inputs", d) for d in ["data1.json", "data2.json"] ],
            "configname": os.path.join(user_name, "configs", "config22.json")
        }
    }
    s3_client = session.client("s3")
    s3_resource = session.resource("s3")
    # Patch the s3 utils module to use localstack-backed clients.
    monkeypatch.setattr( s3, "s3_client", session.client("s3") )
    ## TODO I don't think these are scoped correctly w/o a context manager.
    monkeypatch.setattr(s3, "s3_resource", session.resource("s3"))
    try:
        # Probe for the expected keys; any miss raises ClientError and
        # triggers the seeding branch below.
        for sk in subkeys:
            obj = s3_client.get_object(Bucket=bucket_name, Key=os.path.join(user_name, sk))
        s3_client.get_object(Bucket=bucket_name, Key="logs/test_user/i-0ff308d5c9b5786f3.json")
    except ClientError:
        ## Write data files
        s3_client.create_bucket(Bucket=bucket_name)
        for sk in subkeys:
            key = os.path.join(user_name, sk)
            writeobj = s3_resource.Object(bucket_name, key)
            content = bytes(json.dumps(subkeys[sk]).encode("UTF-8"))
            writeobj.put(Body=content)
        ## Write logs
        log_paths = get_paths(test_log_mats)
        try:
            for f in log_paths:
                s3_client.upload_file(os.path.join(test_log_mats, f), bucket_name, Key=f)
        except ClientError as e:
            logging.error(e)
            raise
    yield bucket_name, os.path.join(user_name, "submissions/submit.json")
def test_session():
    """A localstack session's SQS client should answer list_queues."""
    client = localstack_client.session.Session().client('sqs')
    response = client.list_queues()
    assert response is not None
environ['AWS_ACCESS_KEY_ID'] = 'foobar' environ['AWS_SECRET_ACCESS_KEY'] = 'foobar' environ['AWS_DEFAULT_REGION'] = 'foobar' # Resources to create resources = { 'SNS_EMAIL_TOPIC': 'awsBidsAlexaSns-local', 'SNS_COMPLIANCE_TOPIC': 'awsBidsComplianceSns-local', 'SNS_TAX_TOPIC': 'awsBidsTaxSns-local', 'SNS_DIRECTORS_TOPIC': 'awsBidsDirectorsSns-local', 'DYNAMODB_TABLE': 'localstack-bids-table' } # Clients session = localstack_client.session.Session() sns = session.client('sns') ddb = session.client('dynamodb', region_name='local', endpoint_url='http://localhost:4569') # Create resources for k, v in resources.items(): if 'TOPIC' in k: topic = sns.create_topic(Name=v) print('Created topic: %s' % topic) elif 'TABLE' in k: try: table = ddb.create_table(AttributeDefinitions=[ { 'AttributeName': 'ServiceName', 'AttributeType': 'S'