def lam(lambda_functions, environment):
    """Fixture: serve *lambda_functions* from a mocked AWS Lambda service.

    Each function definition's own ``"environment"`` mapping is overlaid on
    top of the shared *environment* (a zero-arg callable returning a dict of
    env vars), then the functions are exposed through ``b3f.Service`` inside
    a moto Lambda mock for the duration of the ``yield``.

    :param lambda_functions: iterable of lambda-function definition dicts
        (mutated in place: their ``"environment"`` key is rewritten).
    :param environment: zero-arg callable returning the base env-var dict.
    """
    # BUG FIX: the original loop variable was named `lam`, shadowing this
    # fixture's own name; renamed for clarity.
    for function_def in lambda_functions:
        function_def["environment"] = {
            **environment(),
            **function_def.get("environment", {}),
        }
    with b3f.utils.set_env(environment()):
        with moto.mock_lambda():
            with b3f.Service("awslambda", lambda_functions):
                yield
def setup(self):
    """Alert Merger - Setup"""
    # Create both service mocks, then activate them together.
    self.dynamo_mock = mock_dynamodb2()
    self.lambda_mock = mock_lambda()
    for service_mock in (self.dynamo_mock, self.lambda_mock):
        service_mock.start()

    # Provision the mocked resources the merger depends on.
    create_lambda_function(_ALERT_PROCESSOR, 'us-east-1')
    setup_mock_alerts_table(_ALERTS_TABLE)

    self.merger = main.AlertMerger.get_instance()
def requireMocking():
    """Configure the moto mocking context for stasis tests.

    Must be called before all other methods in tests. Starts a mock for each
    AWS service the code under test touches, warms up the boto3 session,
    yields control to the test, and then stops every mock again — teardown is
    now guaranteed via ``finally`` even when the test body raises (the
    original version skipped all ``stop()`` calls on failure).
    """
    mocks = [
        moto.mock_s3(),
        moto.mock_sns(),
        moto.mock_sqs(),
        moto.mock_dynamodb2(),
        moto.mock_lambda(),
        moto.mock_ecs(),
        moto.mock_ec2(),
        moto.mock_ecr(),
    ]
    for mock in mocks:
        mock.start()

    # Touch clients/resources so they are created inside the mocked context.
    session = boto3.session.Session()
    session.client('sns')
    session.client('s3')
    boto3.resource('dynamodb')  # was bound to an unused local before

    try:
        yield
    finally:
        # Always tear the mocks down, even if the test raised.
        for mock in mocks:
            mock.stop()
def setup_aws_lambda(
    region_name: str,
    func_name: str,
    func_str: str,
    environment: Dict[str, str] = None,  # None -> {}; avoids shared mutable default
):
    """Create a mocked AWS Lambda function and yield while it exists.

    :param region_name: AWS region to create the client in.
    :param func_name: name of the Lambda function to register.
    :param func_str: Python source of the handler (zipped via
        ``_process_lambda``).
    :param environment: optional env-var mapping passed to the function.
        BUG FIX: the original default was a mutable ``{}`` shared across all
        calls; ``None`` is now substituted with a fresh dict per call.
    """
    if environment is None:
        environment = {}
    with mock_lambda():
        lambda_client = boto3.client("lambda", region_name)
        lambda_client.create_function(
            FunctionName=func_name,
            Runtime="python3.6",
            Role="test-iam-role",
            Handler="lambda_function.lambda_handler",
            Code={"ZipFile": _process_lambda(func_str)},
            Environment={"Variables": environment},
            Description="test lambda function",
            Timeout=3,
            MemorySize=128,
            Publish=True,
        )
        yield
def setUp(self):
    """Set environment variables and setup the mocks."""
    os.environ.update({
        'BATCH_LAMBDA_NAME': 'test_batch_lambda_name',
        'BATCH_LAMBDA_QUALIFIER': 'Production',
        'OBJECTS_PER_MESSAGE': '2',
        'S3_BUCKET_NAME': 'test_s3_bucket',
        'SQS_QUEUE_URL': 'https://sqs.us-east-1.amazonaws.com/1234/test_queue',
    })

    self._mocks = [moto.mock_cloudwatch(), moto.mock_lambda(), moto.mock_s3(), moto.mock_sqs()]
    for service_mock in self._mocks:
        service_mock.start()

    # Import batch lambda handler _after_ the mocks have been initialized.
    from lambda_functions.batcher import main
    self.batcher_main = main

    self._bucket = boto3.resource('s3').Bucket(os.environ['S3_BUCKET_NAME'])
    self._bucket.create()

    queue_response = boto3.client('sqs').create_queue(QueueName='test_queue')
    self._queue = boto3.resource('sqs').Queue(queue_response['QueueUrl'])
def requireMocking():
    """Configure the moto mocking context for stasis tests.

    Must be called before all other methods in tests. Starts a Lambda mock,
    points the environment at the test stage and carrot test database, yields
    to the test, then stops the mock — teardown is now guaranteed via
    ``finally`` even when the test body raises (the original skipped
    ``stop()`` on failure, and carried a useless trailing ``pass``).
    """
    lamb = moto.mock_lambda()
    lamb.start()

    os.environ['current_stage'] = 'test'
    os.environ['carrot_host'] = 'lc-binbase-dev.czbqhgrlaqbf.us-west-2.rds.amazonaws.com'
    os.environ['carrot_port'] = '5432'
    # Credentials are intentionally masked placeholders in this test config.
    os.environ['carrot_username'] = '******'
    os.environ['carrot_password'] = '******'
    os.environ['carrot_database'] = 'carrot-test'

    try:
        yield
    finally:
        lamb.stop()
def __init__(self, pipeline):
    """Store the pipeline under test along with S3/Lambda mock objects.

    The mocks are created here but not started; callers are responsible for
    starting/stopping them. (Removed a leftover ``print('In Init')`` debug
    statement.)

    :param pipeline: the pipeline instance this helper wraps.
    """
    self.s3_mock = mock_s3()
    self.lambda_mock = mock_lambda()
    self.pipeline = pipeline
def handler_func(event, context):
    """Lambda handler: append a timestamped greeting to a local scratch file.

    NOTE(review): the output path is a hard-coded developer-machine path and
    will fail anywhere else — confirm whether it should be configurable.
    """
    from datetime import datetime
    file = '/Users/rabraham/dev-thescore/analytics/jaya/tmp/rajiv_tries.txt'
    with open(file, 'a') as f:
        f.write('\nHello Rajiv:' + str(datetime.utcnow()))


if __name__ == '__main__':
    import io
    import boto3
    from jaya.lib import aws
    # ***********
    # Manual smoke test: start a moto Lambda mock, deploy handler_func as a
    # local package, create a Lambda client, then stop the mock. The actual
    # invocation is left commented out below.
    lambda_mock = mock_lambda()
    lambda_mock.start()
    from jaya.core import AWSLambda
    import json
    lambda_name = 'print-lambda'
    us_east = 'us-east-1'
    print_lambda = AWSLambda(lambda_name, handler_func, us_east)
    deploy_lambda.deploy_lambda_package_local(print_lambda)
    conn = boto3.client('lambda', us_east)
    # success_result = conn.invoke(FunctionName=lambda_name,
    #                              InvocationType='RequestResponse',
    #                              Payload=json.dumps({'hi': 'Rajiv'}))
    lambda_mock.stop()
    # ***********
def test_lambda_execute_ecs_task(mock=False):
    """
    Test for lambda function that creates ecs tasks
    :param mock: True: tests using moto lib, False: tests using real cloud env
                 (make sure you have your default creds set)
    :return:
    """
    # Build the mock context managers up front; they are only started when
    # mock=True.
    ms3 = mock_s3()
    ml = mock_lambda()
    mecs = mock_ecs()
    mlogs = mock_logs()
    if mock:
        ms3.start()
        ml.start()
        mecs.start()
        mlogs.start()
    bucket_name = 'testamolbucket'
    key_name = 'taskarn.list'  # path to file that contains task ARNs
    # Remember the starting directory so we can return to it after zipping
    # (os.chdir below changes process-global state).
    script_dir = os.getcwd()
    # NOTE(review): Windows-style relative paths — as written, this test will
    # not run on POSIX systems; confirm the intended platform.
    lambda_function_filepath = r'..\..\scripts'
    lambda_function_file = 'LambdaExecuteEcsTask.py'
    assert os.path.exists(
        os.path.join(lambda_function_filepath, lambda_function_file)), 'File does not exist!'
    lambda_function_name = 'testfunction'
    lambda_execution_role = 'arn:aws:iam::123456789:role/test-amol-role-lambda'
    # Derive '<basename>.zip' from the lambda source filename.
    lambda_zip_file = '.'.join([lambda_function_file.split('.')[0], 'zip'])
    os.chdir(lambda_function_filepath)
    lambda_zip_filepath = r'..\test\outputs\{0}'.format(lambda_zip_file)
    # Zip the lambda source and stage it in the bucket, then register the
    # function pointing at that zip.
    with zipfile.ZipFile(lambda_zip_filepath, 'w') as myzip:
        myzip.write(lambda_function_file)
    _upload_file_to_bucket(lambda_zip_filepath, bucket_name, lambda_zip_file)
    _create_lambda_function(lambda_function_name, 'LambdaExecuteEcsTask.lambda_handler',
                            lambda_execution_role, lambda_zip_file, bucket_name)
    # Minimal S3 event payload shaped like the record the lambda expects.
    s3_event_payload_data = {
        'Records': [{
            's3': {
                'object': {
                    'key': 'testkey'
                },
                'bucket': {
                    'name': bucket_name
                }
            }
        }]
    }
    # Restore the original working directory before the final upload.
    os.chdir(script_dir)
    _upload_file_to_bucket(key_name, bucket_name, key_name)
    if not mock:
        # lambda function which uses boto lib cannot be mocked, but invoking the lambda can be mocked in any case
        # Note: This lambda function invocation has already been validated in real AWS environment.
        _invoke_lambda_function('testfunction', json.dumps(s3_event_payload_data))
def apply_moto_mocks():
    """Activate the full stack of moto service mocks for the fixture's lifetime.

    CloudFormation, IAM, S3, Lambda and KMS are all mocked, a default boto3
    session is set up inside the mocked context, and control is yielded to
    the test.
    """
    with mock_cloudformation():
        with mock_iam():
            with mock_s3():
                with mock_lambda():
                    with mock_kms():
                        boto3.setup_default_session()
                        yield None
def lambda_client(aws_credentials):
    """Yield a boto3 Lambda client backed by a moto mock in DEFAULT_REGION."""
    with mock_lambda():
        client = boto3.client('lambda', region_name=DEFAULT_REGION)
        yield client
def aws_lambda(aws_credentials):
    """Yield a mocked boto3 Lambda client pinned to us-east-1."""
    with mock_lambda():
        mocked_client = boto3.client('lambda', region_name='us-east-1')
        yield mocked_client