def __init__(self, logger, sm_input_list):
    self.logger = logger
    self.sm_input_list = sm_input_list
    self.list_sm_exec_arns = []
    self.s3 = S3(logger)
    self.solution_metrics = SolutionMetrics(logger)
    self.param_handler = CFNParamsHandler(logger)
    self.state_machine = StateMachine(logger)
    self.stack_set = StackSet(logger)
    self.wait_time = os.environ.get('WAIT_TIME')
    self.execution_mode = os.environ.get('EXECUTION_MODE')
def trigger(list_deployment_zip, push_deployment_zip, authorize_deployment_zip,
            docker_cred_deployment_zip, key_id, stack_name, template,
            deployment_stage):
    logger = get_logger(__name__)

    if not is_aws_cred_set():
        logger.error('AWS Creds are not set! Exiting!')
        sys.exit(1)

    if not is_db_envs_set():
        logger.error(
            'MongoDB environment vars needed for lambda functions are not set! Exiting!'
        )
        sys.exit(1)

    if not file_exists(filepath=list_deployment_zip) and not file_exists(
            filepath=push_deployment_zip):
        logger.error('Both zip file locations are invalid. Exiting!')
        sys.exit(1)

    if not file_exists(filepath=template):
        logger.error('Invalid cfn yaml. Exiting!')
        sys.exit(1)

    # TODO(Deepankar): this assumes that the lambda function exists already
    S3_DEFAULT_BUCKET = 'lambda-handlers-jina'
    s3 = S3(bucket=S3_DEFAULT_BUCKET)

    # TODO(Deepankar): can be put in a loop for all lambda functions
    # Default the S3 keys so the CloudFormation parameters below are defined
    # even when a deployment zip is not provided.
    s3_list_key = s3_push_key = s3_authorize_key = s3_docker_cred_key = ''
    if list_deployment_zip is not None:
        zip_filename = os.path.basename(list_deployment_zip)
        s3_list_key = f'hubapi_list/{key_id}/{zip_filename}'
        s3.put(filepath=list_deployment_zip, key=s3_list_key)

    if push_deployment_zip is not None:
        zip_filename = os.path.basename(push_deployment_zip)
        s3_push_key = f'hubapi_push/{key_id}/{zip_filename}'
        s3.put(filepath=push_deployment_zip, key=s3_push_key)

    if authorize_deployment_zip is not None:
        zip_filename = os.path.basename(authorize_deployment_zip)
        s3_authorize_key = f'hubapi_authorize/{key_id}/{zip_filename}'
        s3.put(filepath=authorize_deployment_zip, key=s3_authorize_key)

    if docker_cred_deployment_zip is not None:
        zip_filename = os.path.basename(docker_cred_deployment_zip)
        s3_docker_cred_key = f'docker_auth/{key_id}/{zip_filename}'
        s3.put(filepath=docker_cred_deployment_zip, key=s3_docker_cred_key)

    cfn_yml = read_file_content(filepath=template)

    hostname, username, password, database_name, hubpod_collection, metadata_collection, \
        docker_username, docker_password = read_environment()

    parameters = [{
        'ParameterKey': 'DefS3Bucket',
        'ParameterValue': S3_DEFAULT_BUCKET
    }, {
        'ParameterKey': 'HubListLambdaFnS3Key',
        'ParameterValue': s3_list_key
    }, {
        'ParameterKey': 'HubPushLambdaFnS3Key',
        'ParameterValue': s3_push_key
    }, {
        'ParameterKey': 'HubAPIAuthorizeLambdaFnS3Key',
        'ParameterValue': s3_authorize_key
    }, {
        'ParameterKey': 'DockerCredFetcherLambdaFnS3Key',
        'ParameterValue': s3_docker_cred_key
    }, {
        'ParameterKey': 'DefLambdaRole',
        'ParameterValue': 'arn:aws:iam::416454113568:role/lambda-role'
    }, {
        'ParameterKey': 'DeploymentStage',
        'ParameterValue': deployment_stage
    }, {
        'ParameterKey': 'JinaDBHostname',
        'ParameterValue': hostname
    }, {
        'ParameterKey': 'JinaDBUsername',
        'ParameterValue': username
    }, {
        'ParameterKey': 'JinaDBPassword',
        'ParameterValue': password
    }, {
        'ParameterKey': 'JinaHubpodCollection',
        'ParameterValue': hubpod_collection
    }, {
        'ParameterKey': 'JinaMetadataCollection',
        'ParameterValue': metadata_collection
    }, {
        'ParameterKey': 'JinaDBName',
        'ParameterValue': database_name
    }, {
        'ParameterKey': 'JinaDockerUsername',
        'ParameterValue': docker_username
    }, {
        'ParameterKey': 'JinaDockerPassword',
        'ParameterValue': docker_password
    }]

    try:
        with CFNStack(name=stack_name,
                      template=cfn_yml,
                      parameters=parameters,
                      delete_at_exit=False) as api_cfn_stack:
            if not api_cfn_stack.exists:
                logger.error('Stack creation/update failed. Exiting context.')
                sys.exit(1)
            logger.info('Resources description -- ')
            for resource in api_cfn_stack.resources:
                logger.info(
                    f'Name: `{resource["LogicalResourceId"]}`\t\tType: `{resource["ResourceType"]}`\t'
                    f'ID: `{resource["PhysicalResourceId"]}`')
    except (StackCreationFailed, StackUpdateFailed) as cfn_exp:
        logger.exception(f'Stack creation/update failed: {cfn_exp}')
        sys.exit(1)
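
A minimal sketch of the loop refactor suggested by the TODO above: the four
near-identical upload blocks collapse into one pass over prefix/zip pairs. The
helper name and its dict-based return value are assumptions, not part of the
original code.

def upload_deployment_zips(s3, key_id, zips_by_prefix):
    """Upload each provided deployment zip and return its S3 key (hypothetical helper)."""
    s3_keys = {}
    for prefix, zip_path in zips_by_prefix.items():
        if zip_path is None:
            s3_keys[prefix] = ''  # keep the key defined even when no zip is given
            continue
        zip_filename = os.path.basename(zip_path)
        s3_key = f'{prefix}/{key_id}/{zip_filename}'
        s3.put(filepath=zip_path, key=s3_key)
        s3_keys[prefix] = s3_key
    return s3_keys

Usage would mirror the blocks above, e.g.
upload_deployment_zips(s3, key_id, {'hubapi_list': list_deployment_zip,
'hubapi_push': push_deployment_zip, 'hubapi_authorize': authorize_deployment_zip,
'docker_auth': docker_cred_deployment_zip}).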
def __init__(self, logger, region):
    self.logger = logger
    self.param_handler = CFNParamsHandler(logger)
    self.manifest_folder = os.environ.get('MANIFEST_FOLDER')
    self.region = region
    self.s3 = S3(logger)
    def compare_template_and_params(self, sm_input, stack_name):
        self.logger.info("Comparing the templates and parameters.")
        template_compare, params_compare = False, False
        if stack_name:
            describe_response = self.stack_set\
                .describe_stack_set(stack_name)
            self.logger.info("Describe Stack Set response: {}".format(
                describe_response))
            if describe_response is not None:
                self.logger.info("Found existing stack set.")

                operation_status_flag = self.get_stack_set_operation_status(
                    stack_name)

                if operation_status_flag:
                    self.logger.info("Continuing...")
                else:
                    return operation_status_flag, operation_status_flag

                # Compare template copy - START
                self.logger.info(
                    "Comparing the template of the StackSet:"
                    " {} with local copy of template".format(stack_name))

                template_http_url = sm_input.get('ResourceProperties')\
                    .get('TemplateURL', '')
                if template_http_url:
                    bucket_name, key_name, region = parse_bucket_key_names(
                        template_http_url)
                    local_template_file = tempfile.mkstemp()[1]

                    s3_endpoint_url = "https://s3.%s.amazonaws.com" % region
                    s3 = S3(self.logger,
                            region=region,
                            endpoint_url=s3_endpoint_url)
                    s3.download_file(bucket_name, key_name,
                                     local_template_file)
                else:
                    self.logger.error("TemplateURL in state machine input "
                                      "is empty. Check state_machine_event"
                                      ":{}".format(sm_input))
                    return False, False

                cfn_template_file = tempfile.mkstemp()[1]
                with open(cfn_template_file, "w") as f:
                    f.write(
                        describe_response.get('StackSet').get('TemplateBody'))
                # filecmp.cmp returns True if the file contents are the same
                template_compare = filecmp.cmp(local_template_file,
                                               cfn_template_file, False)
                self.logger.info("Comparing the parameters of the StackSet"
                                 ": {} with local copy of JSON parameters"
                                 " file".format(stack_name))

                params_compare = True
                params = sm_input.get('ResourceProperties')\
                    .get('Parameters', {})
                # templates are the same - compare parameters (skip the
                # comparison if the templates differ)
                if template_compare:
                    cfn_params = reverse_transform_params(
                        describe_response.get('StackSet').get('Parameters'))
                    for key, value in params.items():
                        if cfn_params.get(key, '') != value:
                            params_compare = False
                            break

                self.logger.info(
                    "template_compare={}; params_compare={}".format(
                        template_compare, params_compare))
            else:
                self.logger.info('Stack set does not exist. '
                                 'Creating a new stack set...')
                template_compare, params_compare = True, True
                # set this flag to create the stack set
                self.stack_set_exist = False

        return template_compare, params_compare
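
A minimal sketch of the reverse_transform_params helper assumed above: it
flattens the CloudFormation-style list of ParameterKey/ParameterValue dicts
returned by describe_stack_set into a plain dict for the key-by-key comparison.
The real solution's implementation may differ.

def reverse_transform_params(cfn_parameters):
    """Convert [{'ParameterKey': k, 'ParameterValue': v}, ...] to {k: v}."""
    return {
        param.get('ParameterKey'): param.get('ParameterValue')
        for param in cfn_parameters or []
    }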
def __init__(self, logger):
    self.logger = logger
    self.s3 = S3(logger)
    self.param_handler = CFNParamsHandler(logger)
    self.manifest = Manifest(os.environ.get('MANIFEST_FILE_PATH'))
    self.manifest_folder = os.environ.get('MANIFEST_FOLDER')
def config_deployer(event):
    try:
        s3 = S3(logger)

        # set variables
        source_bucket_name = event.get('BucketConfig', {})  \
            .get('SourceBucketName')
        key_name = event.get('BucketConfig', {}).get('SourceS3Key')
        destination_bucket_name = event.get('BucketConfig', {})  \
            .get('DestinationBucketName')
        input_zip_file_name = key_name.split("/")[-1]
        output_zip_file_name = event.get('BucketConfig', {})  \
            .get('DestinationS3Key')
        alias_name = event.get('KMSConfig', {}).get('KMSKeyAlias')
        policy = event.get('KMSConfig', {}).get('KMSKeyPolicy')
        flag_value = event.get('MetricsFlag')
        base_path = '/tmp/custom_control_tower'
        input_file_path = base_path + "/" + input_zip_file_name
        extract_path = base_path + "/" + 'extract'
        output_path = base_path + "/" + 'out'
        exclude_j2_files = []

        # Search for existing KMS key alias
        key_id = find_alias(alias_name)

        # if alias name not found in the list, create a new alias with
        # new target key
        if not key_id:
            key_id = create_cmk_with_alias(alias_name, policy)
            logger.info('Key ID created: {}'.format(key_id))
            kms.enable_key_rotation(key_id)
            logger.info('Automatic key rotation enabled.')
        else:
            logger.info('Key ID: {} found attached with alias: {}'.format(
                key_id, alias_name))
            logger.info('Updating KMS key policy')
            update_key_policy(key_id, policy)
            kms.enable_key_rotation(key_id)

        # Encrypt configuration bucket
        s3.put_bucket_encryption(destination_bucket_name, key_id)

        # Download the file from Solutions S3 bucket
        make_dir(base_path, logger)
        s3.download_file(source_bucket_name, key_name, input_file_path)

        # Unzip the config zip file
        unzip_function(input_zip_file_name, base_path, extract_path)

        # Find and replace the variable in Manifest file
        for item in event.get('FindReplace', []):
            f = item.get('FileName')
            parameters = item.get('Parameters')
            exclude_j2_files.append(f)
            filename, file_extension = os.path.splitext(f)
            destination_file_path = extract_path + "/" + filename \
                if file_extension == '.j2' else extract_path + "/" + f
            find_replace(extract_path, f, destination_file_path, parameters)

        # Zip the contents
        exclude = ['zip'] + exclude_j2_files
        make_dir(output_path, logger)
        zip_function(output_zip_file_name, extract_path, output_path, exclude)

        # Upload the file in the customer S3 bucket
        local_file = output_path + "/" + output_zip_file_name
        remote_file = output_zip_file_name
        s3.upload_file(destination_bucket_name, local_file, remote_file)

        # create SSM parameters to send anonymous data if opted in
        put_ssm_parameter('/org/primary/metrics_flag', flag_value)
        put_ssm_parameter('/org/primary/customer_uuid', str(uuid4()))
        return None
    except Exception as e:
        logger.log_general_exception(
            __file__.split('/')[-1],
            inspect.stack()[0][3], e)
        raise
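
A minimal sketch of the find_alias and create_cmk_with_alias helpers called
above, written against the standard boto3 KMS API. Note that `kms` inside
config_deployer appears to be the solution's own wrapper client; the
`kms_client` name below is hypothetical, and the original helpers may differ.

import boto3

kms_client = boto3.client('kms')

def find_alias(alias_name):
    """Return the target key id for alias_name, or None if the alias is absent."""
    paginator = kms_client.get_paginator('list_aliases')
    for page in paginator.paginate():
        for alias in page['Aliases']:
            if alias['AliasName'] == alias_name:
                return alias.get('TargetKeyId')
    return None

def create_cmk_with_alias(alias_name, policy):
    """Create a customer-managed KMS key with the given policy and alias it."""
    key_id = kms_client.create_key(Policy=policy)['KeyMetadata']['KeyId']
    kms_client.create_alias(AliasName=alias_name, TargetKeyId=key_id)
    return key_id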