def lambda_handler(event, context):
    """
    Lambda function that runs on cloudformation create. It reads through
    all the strategies to get their schema, and creates the settings in
    the dynamodb table.
    """
    ddb_client = boto3.resource('dynamodb')
    table = ddb_client.Table(os.environ['STRATEGIES_TABLE'])

    #To-do: Relative import of strategies modules and loop to insert them all.
    try:
        table.put_item(Item=grid1.DEFAULT_SETTINGS)
    except Exception as e:
        _LOGGER.error(
            'Unable to put initial strategy settings into DDB. {0}'.format(e))
        send(event, context, FAILED)
        return

    send(event, context, SUCCESS)
    return
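
# A minimal sketch of the to-do above: discover strategy modules dynamically
# and insert each one's DEFAULT_SETTINGS instead of hard-coding grid1. The
# 'strategies' package name and the DEFAULT_SETTINGS attribute convention are
# assumptions based on the grid1 usage above, not the project's confirmed layout.
import importlib
import pkgutil

def _put_all_strategy_settings(table):
    """Insert DEFAULT_SETTINGS for every module found in the strategies package."""
    import strategies  # hypothetical package containing grid1 and friends
    for module_info in pkgutil.iter_modules(strategies.__path__):
        module = importlib.import_module(
            'strategies.{0}'.format(module_info.name))
        settings = getattr(module, 'DEFAULT_SETTINGS', None)
        if settings is not None:
            table.put_item(Item=settings)
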
def lambda_handler(event, context):
    """
    Lambda function that runs on cloudformation create. It populates the
    metrics definition table with information about each metric.
    """
    ddb_client = boto3.resource('dynamodb')
    table = ddb_client.Table(os.environ['METRICSDEF_TABLE'])

    metrics_def = SettingsDefinition()
    #Collect the definition functions on SettingsDefinition as (name, function) tuples.
    function_list = inspect.getmembers(metrics_def, predicate=inspect.isfunction)

    for item in function_list:
        try:
            table.put_item(Item=item[1]())
        except Exception as e:
            _LOGGER.error(
                'Unable to put metric definition into DDB. {0}'.format(e))
            send(event, context, FAILED)
            return

    send(event, context, SUCCESS)
    return
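
# For context, a sketch of what one definition on SettingsDefinition is
# assumed to look like for the loop above to work: a zero-argument callable
# returning a complete DynamoDB item. Defining it as a staticmethod keeps it
# a plain function on the instance, so inspect.isfunction picks it up. The
# class body and attribute names below are illustrative only, not the
# project's actual definitions.
class SettingsDefinitionSketch:

    @staticmethod
    def profit_metric():
        """Hypothetical metric definition; the real class defines one per metric."""
        return {
            'metricName': 'profit',
            'displayName': 'Profit',
            'unit': 'USD',
            'description': 'Running profit/loss for the strategy.'
        }
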
def lambda_handler(event, context):
    """
    Lambda function that runs on cloudformation create and update. It
    auto-populates config in the global settings dynamodb table. This way
    settings can be viewed/changed by the frontend.
    """
    ddb_client = boto3.resource('dynamodb')
    table = ddb_client.Table(os.environ['GLOBALSETTINGS_TABLE'])

    try:
        table.put_item(Item={
            'globalConfig': 'SET',
            'GlobalTradingEnabled': False,
            'ShareResults': False,
            'ResultsUsername': '',
            'MobileNumber': ''
        })
    except Exception as e:
        _LOGGER.error(
            'Unable to put initial global settings into DDB. {0}'.format(e))
        send(event, context, FAILED)
        return

    send(event, context, SUCCESS)
    return
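
# A sketch of how another lambda or the frontend API could read the row
# written above back out. It assumes 'globalConfig' is the table's partition
# key, matching the item written here; the helper name is illustrative.
def get_global_settings():
    """Fetch the single global settings item, or None if it isn't there yet."""
    table = boto3.resource('dynamodb').Table(os.environ['GLOBALSETTINGS_TABLE'])
    response = table.get_item(Key={'globalConfig': 'SET'})
    return response.get('Item')
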
def lambda_handler(event, context):
    """
    Lambda function that runs on cloudformation create and update.
    Unzips the website static files build and uploads it to s3.
    """
    current_directory = os.getcwd()

    try:
        website_version = os.environ['WEBSITE_VERSION']
        zipfile_location = '{cwd}/website_{ver}.zip'.format(
            cwd=current_directory, ver=website_version)
    except KeyError:
        _LOGGER.error(
            'Missing environment variables for lambda to prepare content.')
        send(event, context, FAILED)
        return

    _LOGGER.info('Unzipping {0}'.format(zipfile_location))

    #Unzip. Lambda can only write to /tmp.
    with zipfile.ZipFile(zipfile_location, 'r') as zip_ref:
        zip_ref.extractall('/tmp')

    assets_folder = '/tmp/build'

    #Walk the files unzipped to get a full path list of what needs
    #to be uploaded to s3.
    file_list = []
    for root, _, filenames in os.walk(assets_folder):
        for filename in filenames:
            file_list.append(os.path.join(root, filename))

    bucket_name = os.environ['S3_STATIC_ASSETS_BUCKET']
    s3_resource = boto3.resource('s3')
    destination_bucket = s3_resource.Bucket(bucket_name)

    _LOGGER.info('Cleaning out old items from the bucket.')

    #Truncate the bucket (delete all files in it) before uploading.
    try:
        destination_bucket.objects.all().delete()
    except Exception as e:
        _LOGGER.error('Problem truncating bucket. {0}'.format(e))
        send(event, context, FAILED)
        return

    _LOGGER.info(
        'Copying static assets version {ver} to S3 bucket {buk}.'.format(
            ver=website_version, buk=bucket_name))

    #Upload each file, using its path relative to the build folder as the key.
    for file in file_list:
        try:
            destination_bucket.upload_file(
                Filename=file,
                Key=file.split(assets_folder)[1].strip('/'),
                ExtraArgs={
                    'ACL': 'public-read',
                    'ContentType': match_content_type(file)
                })
        except Exception as e:
            _LOGGER.error('Problem uploading file to s3. {0}'.format(e))
            send(event, context, FAILED)
            return

    send(event, context, SUCCESS)
    return
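
# The match_content_type helper used above isn't shown in this section. A
# minimal sketch, assuming it maps a file path to a MIME type for the S3
# ContentType header (the real implementation may differ):
import mimetypes

def match_content_type(filename):
    """Guess a Content-Type from the file extension; default to binary."""
    content_type, _ = mimetypes.guess_type(filename)
    return content_type or 'application/octet-stream'
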
def lambda_handler(event, context):
    """
    Lambda function that runs on cloudformation create.
    Creates a default Cognito user.
    """
    alphabet = string.ascii_letters + string.digits
    password_length = 12
    _password = ''.join(
        secrets.choice(alphabet) for i in range(password_length))
    default_user_email = '*****@*****.**'

    #Verify lambda is running as part of a cloudformation event trigger.
    try:
        _LOGGER.info('Cloudformation event {0}.'.format(event['RequestType']))
    except KeyError:
        _LOGGER.error('Unable to determine cloudformation event type.')
        send(event, context, FAILED)
        return

    #Create the user.
    try:
        cognito_client = boto3.client('cognito-idp')
        cognito_response = cognito_client.sign_up(
            ClientId=os.environ['COGNITO_USER_POOL_CLIENT_ID'],
            Username=default_user_email,
            Password=_password,
            UserAttributes=[{
                'Name': 'email',
                'Value': default_user_email
            }])
        _LOGGER.info(
            'Cognito user creation response: {0}.'.format(cognito_response))
        #Extract the UUID of the user so it can be returned for later use.
        cognito_user_uuid = cognito_response['UserSub']
    except Exception as e:
        _LOGGER.error('Unable to create Cognito default user. {0}'.format(e))
        send(event, context, FAILED)
        return

    #Put the password into SSM. Uses the default kms key.
    #Overwrite since stack deletion doesn't necessarily delete the SSM param.
    try:
        ssm_client = boto3.client('ssm')
        ssm_response = ssm_client.put_parameter(
            Name='Cognito_admin_user_password',
            Description=(
                'Password for the default user in Cognito to access the '
                'stack manager.'),
            Value=_password,
            Type='SecureString',
            Overwrite=True)
        _LOGGER.info('SSM put parameter response: {0}.'.format(ssm_response))
    except Exception as e:
        _LOGGER.error('Unable to put password into SSM. {0}'.format(e))
        send(event, context, FAILED)
        return

    send(event,
         context,
         SUCCESS,
         response_data={'CognitoUserId': cognito_user_uuid})
    return
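
# Every handler above reports back to CloudFormation via
# send(event, context, status, response_data=...), which isn't shown in this
# section. A minimal cfnresponse-style sketch under those assumptions (the
# SUCCESS/FAILED constants and the response_data keyword are taken from the
# call sites above; the real helper may differ):
import json
import urllib3

SUCCESS = 'SUCCESS'
FAILED = 'FAILED'

def send(event, context, status, response_data=None):
    """PUT the custom resource result to the pre-signed ResponseURL."""
    body = json.dumps({
        'Status': status,
        'Reason': 'See CloudWatch log stream: {0}'.format(
            context.log_stream_name),
        'PhysicalResourceId': context.log_stream_name,
        'StackId': event['StackId'],
        'RequestId': event['RequestId'],
        'LogicalResourceId': event['LogicalResourceId'],
        'Data': response_data or {}
    })
    urllib3.PoolManager().request(
        'PUT',
        event['ResponseURL'],
        body=body,
        headers={'Content-Type': ''})
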