Code Example #1
    def run(self):
        _logger.info('reading configuration...')
        output_config = copy.deepcopy(configuration(self.input_config_path, suppress_decryption=True))
        dart_host = _get_dart_host(output_config)
        _logger.info('setting up new dart partial environment: %s' % dart_host)
        self.create_partial(output_config)
        _logger.info('partial environment created with config: %s, url: %s' % (self.output_config_s3_path, dart_host))
Code Example #2
    def __init__(self, dart_config):
        email_config = dart_config['email']
        self._env_name = dart_config['dart']['env_name'].upper()
        self._mailer = Mailer(**email_config['mailer'])
        self._from = email_config['from']
        self._cc_on_error = email_config['cc_on_error']
        self._debug = email_config.get('debug', False)
        self._suppress_send = email_config.get('suppress_send', False)
        self._dart_host = _get_dart_host(dart_config)
Code Example #3
    def run(self):
        _logger.info('reading configuration...')
        output_config = copy.deepcopy(
            configuration(self.input_config_path, suppress_decryption=True))
        dart_host = _get_dart_host(output_config)
        _logger.info('setting up new dart partial environment: %s' % dart_host)
        self.create_partial(output_config)
        _logger.info('partial environment created with config: %s, url: %s' %
                     (self.output_config_s3_path, dart_host))
Code Example #4
File: email.py  Project: ophiradi/dart
    def __init__(self, dart_config):
        email_config = dart_config['email']
        self._env_name = dart_config['dart']['env_name'].upper()
        self._mailer = Mailer(**email_config['mailer'])
        self._from = email_config['from']
        self._cc_on_error = email_config['cc_on_error']
        self._debug = email_config.get('debug', False)
        self._suppress_send = email_config.get('suppress_send', False)
        self._dart_host = _get_dart_host(dart_config)
Code Example #5
File: email.py  Project: gkjegan/dart
    def __init__(self, dart_config):
        email_config = dart_config['email']
        self._env_name = dart_config['dart']['env_name'].upper()
        self._mailer = Mailer(**email_config['mailer'])
        self._from = email_config['from']
        self._cc_on_error = email_config['cc_on_error']
        self._debug = email_config.get('debug', False)
        self._suppress_send = email_config.get('suppress_send', False)
        self._dart_host = _get_dart_host(dart_config)

        # We have one mailer being used in trigger_listener
        # we should not have that many emails pending to be sent
        self._email_queue = Queue(maxsize=1000)
        email_worker = Thread(target=self.send_queued_email_runner, args=(self._email_queue,))
        # we run in a container; when it exits this thread will exit too
        email_worker.setDaemon(True)
        email_worker.start()
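
The three __init__ variants above all read the same keys from the dart configuration. As a rough guide, a minimal, hypothetical fragment of dart_config covering only those keys might look like the sketch below; the key names come from the code, but the values and the Mailer keyword arguments are placeholders rather than the project's real settings.

    # Hypothetical dart_config fragment; values and mailer kwargs are assumptions.
    dart_config = {
        'dart': {'env_name': 'stg'},
        'email': {
            'mailer': {                      # unpacked as Mailer(**email_config['mailer'])
                'host': 'smtp.example.com',  # assumed kwarg name
                'port': 587,                 # assumed kwarg name
            },
            'from': 'dart@example.com',
            'cc_on_error': ['oncall@example.com'],
            'debug': False,           # optional; defaults to False
            'suppress_send': False,   # optional; defaults to False
        },
    }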
Code Example #6
    def run(self):
        _logger.info('reading configuration...')
        output_config = copy.deepcopy(
            configuration(self.input_config_path, suppress_decryption=True))
        dart_host = _get_dart_host(output_config)
        _logger.info('setting up new dart full environment: %s' % dart_host)

        _logger.info('verifying s3 buckets do not exist')
        config_bucket_name = output_config['s3']['config_bucket']
        data_bucket_name = output_config['s3']['data_bucket']
        if 's3' not in self.stacks_to_skip:
            iequals = lambda s1, s2: s1.lower() == s2.lower()
            buckets = [
                b['Name'] for b in boto3.client('s3').list_buckets()['Buckets']
            ]
            matches = [
                b for b in buckets if (iequals(b, config_bucket_name)
                                       or iequals(b, data_bucket_name))
            ]
            if len(matches) > 0:
                raise Exception('s3 config and/or data bucket already exists!')

        _logger.info('creating initial stacks')
        aws_account_id = output_config['dart']['aws_account']
        replacements = {
            '{DART_REGION}': output_config['dart']['region'],
            '{DART_AWS_ACCOUNT}': aws_account_id,
            '{DART_QUEUE_PREFIX}': output_config['dart']['queue_prefix'],
            '{DART_CONFIG_BUCKET}': output_config['s3']['config_bucket'],
            '{DART_DATA_BUCKET}': output_config['s3']['data_bucket'],
        }
        iam_stack_name = self._create_stack('iam', output_config, replacements)
        sns_stack_name = self._create_stack('sns', output_config)

        _logger.info('waiting for stack completion')
        iam_outputs = self._wait_for_stack_completion_and_get_outputs(
            iam_stack_name, 7)
        sns_outputs = self._wait_for_stack_completion_and_get_outputs(
            sns_stack_name, 1)

        uds_inpf_role = _get_element(iam_outputs, 'OutputKey',
                                     'UdsInstanceProfileRole')['OutputValue']
        uds_ec2_inpf = _get_element(iam_outputs, 'OutputKey',
                                    'UdsEc2InstanceProfile')['OutputValue']
        uds_ec2_inpf_role = _get_element(
            iam_outputs, 'OutputKey',
            'UdsEc2InstanceProfileRole')['OutputValue']
        ecs_container_inpf = _get_element(
            iam_outputs, 'OutputKey',
            'EcsContainerInstanceProfile')['OutputValue']
        ecs_container_inpf_role = _get_element(
            iam_outputs, 'OutputKey',
            'EcsContainerInstanceProfileRole')['OutputValue']
        ecs_service_role = _get_element(iam_outputs, 'OutputKey',
                                        'EcsServiceRole')['OutputValue']
        sns_arn = sns_outputs[0]['OutputValue']

        _logger.info('updating configuration with sns arn')
        self._set_cfn_boto_param_value(output_config, 'logs', 'AlarmActions',
                                       sns_arn)

        _logger.info(
            'updating configuration with subscription queue urls/arns')
        subscription_queue_arn, subscription_queue_url = self._ensure_queue_exists(
            output_config, 'subscription_queue')
        s3_params = output_config['cloudformation_stacks']['s3']['boto_args'][
            'Parameters']
        _get_element(s3_params, 'ParameterKey',
                     'DartConfigBucket')['ParameterValue'] = config_bucket_name
        _get_element(s3_params, 'ParameterKey',
                     'DartDataBucket')['ParameterValue'] = data_bucket_name
        _get_element(
            s3_params, 'ParameterKey',
            'SubscriptionQueueUrl')['ParameterValue'] = subscription_queue_url
        _get_element(
            s3_params, 'ParameterKey',
            'SubscriptionQueueArn')['ParameterValue'] = subscription_queue_arn

        _logger.info('creating s3 and logs stacks')
        s3_stack_name = self._create_stack('s3', output_config)
        logs_stack_name = self._create_stack('logs', output_config)

        _logger.info('creating/updating kms key')
        with open(
                dart_root_relative_path(
                    output_config['kms']['key_policy_template'])) as f:
            policy = json.load(f)
            kms_authorized_users = [
                self._role_arn(ecs_container_inpf_role, aws_account_id)
            ]
            kms_authorized_users.extend(
                output_config['dart']['kms_key_user_arns'])
            policy['Statement'][0]['Principal'][
                'AWS'] = 'arn:aws:iam::%s:root' % aws_account_id
            policy['Statement'][1]['Principal']['AWS'] = output_config['dart'][
                'kms_key_admin_arns']
            policy['Statement'][2]['Principal']['AWS'] = kms_authorized_users
            policy['Statement'][3]['Principal']['AWS'] = kms_authorized_users
            policy_text = json.dumps(policy)
        kms_client = boto3.client('kms')
        key_arn = output_config['kms']['key_arn']
        if key_arn and key_arn != '...TBD...':
            kms_client.put_key_policy(KeyId=key_arn,
                                      PolicyName='default',
                                      Policy=policy_text)
        else:
            key_arn = kms_client.create_key(
                Policy=policy_text)['KeyMetadata']['Arn']
            alias = 'alias/dart-%s-secrets' % self.environment_name
            kms_client.create_alias(AliasName=alias, TargetKeyId=key_arn)

        _logger.info(
            'updating configuration with kms key arn and secrets path, etc')
        output_config['engines']['redshift_engine']['options'][
            'kms_key_arn'] = key_arn
        secrets_config = get_secrets_config(output_config)
        values = (config_bucket_name, self.environment_name)
        secrets_s3_path = 's3://%s/secrets/%s' % values
        secrets_config['secrets_s3_path'] = secrets_s3_path
        secrets_config['kms_key_arn'] = key_arn
        eng_cfg = output_config['engines']
        eng_cfg['redshift_engine']['options'][
            'secrets_s3_path'] = secrets_s3_path
        output_config['dart'][
            's3_datastores_root'] = 's3://%s/datastores/%s' % values

        _logger.info('updating configuration with iam profiles/roles')
        output_config['engines']['emr_engine']['options'][
            'instance_profile'] = uds_ec2_inpf
        output_config['engines']['emr_engine']['options'][
            'service_role'] = uds_inpf_role
        output_config['engines']['dynamodb_engine']['options'][
            'emr_instance_profile'] = uds_ec2_inpf
        output_config['engines']['dynamodb_engine']['options'][
            'emr_service_role'] = uds_inpf_role
        self._set_cfn_boto_param_value(output_config, 'engine-taskrunner',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'engine-worker',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'WebEcsServiceRoleName',
                                       ecs_service_role)
        self._set_cfn_boto_param_value(output_config, 'web',
                                       'IamInstanceProfile',
                                       ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web',
                                       'WebEcsServiceRoleName',
                                       ecs_service_role)

        _logger.info('creating ECR repos')
        ecr_client = boto3.client('ecr')
        all_repo_names = [
            self._full_repo_name(r, output_config)
            for r in output_config['ecr']['repo_names']
        ]
        existing_repo_names = []
        for repo_name in all_repo_names:
            try:
                ecr_client.describe_repositories(repositoryNames=[repo_name])
                existing_repo_names.append(repo_name)
            except ClientError as e:
                if e.response['Error'][
                        'Code'] == 'RepositoryNotFoundException':
                    continue
                raise e
        missing_repo_names = set(all_repo_names) - set(existing_repo_names)
        with open(
                dart_root_relative_path(
                    output_config['ecr']['policy_template'])) as f:
            initial_policy = json.load(f)
            initial_policy['Statement'][0]['Principal']['AWS'] = output_config[
                'dart']['ecr_authorized_user_arns']
            initial_policy_text = json.dumps(initial_policy)
        for repo_name in missing_repo_names:
            ecr_client.create_repository(repositoryName=repo_name)
            ecr_client.set_repository_policy(repositoryName=repo_name,
                                             policyText=initial_policy_text)

        _logger.info('updating ECR repo policies')
        ecr_policy_statement_sid = 'dart-%s-ecs-and-uds-permissions' % self.environment_name
        ecs_container_inpf_role_arn = self._role_arn(ecs_container_inpf_role,
                                                     aws_account_id)
        uds_ec2_inpf_role_arn = self._role_arn(uds_ec2_inpf_role,
                                               aws_account_id)
        for repo_name in all_repo_names:
            policy = json.loads(
                ecr_client.get_repository_policy(
                    repositoryName=repo_name)['policyText'])
            exists_index = None
            for i, statement in enumerate(policy['Statement']):
                if statement['Sid'] == ecr_policy_statement_sid:
                    exists_index = i
            if exists_index is not None:  # compare to None so a match at index 0 is also replaced
                policy['Statement'].pop(exists_index)
            policy['Statement'].append({
                'Sid':
                ecr_policy_statement_sid,
                'Effect':
                'Allow',
                'Principal': {
                    'AWS':
                    [ecs_container_inpf_role_arn, uds_ec2_inpf_role_arn]
                },
                'Action': [
                    'ecr:GetDownloadUrlForLayer', 'ecr:BatchGetImage',
                    'ecr:BatchCheckLayerAvailability',
                    'ecr:GetAuthorizationToken'
                ]
            })
            policy_text = json.dumps(policy)
            ecr_client.set_repository_policy(repositoryName=repo_name,
                                             policyText=policy_text)

        _logger.info('updating configuration with docker image references')
        output_config['local_setup'][
            'elasticmq_docker_image'] = self._docker_image(
                'elasticmq', output_config)
        eng_cfg['no_op_engine']['docker_image'] = self._docker_image(
            'engine-no_op', output_config)
        eng_cfg['emr_engine']['docker_image'] = self._docker_image(
            'engine-emr', output_config)
        eng_cfg['emr_engine']['options'][
            'impala_docker_repo_base_url'] = self._ecr_base_url(output_config)
        eng_cfg['dynamodb_engine']['docker_image'] = self._docker_image(
            'engine-dynamodb', output_config)
        eng_cfg['dynamodb_engine']['options'][
            'emr_impala_docker_repo_base_url'] = self._ecr_base_url(
                output_config)
        eng_cfg['redshift_engine']['docker_image'] = self._docker_image(
            'engine-redshift', output_config)
        ew_image = self._docker_image('engine-worker', output_config)
        sw_image = self._docker_image('subscription-worker', output_config)
        tw_image = self._docker_image('trigger-worker', output_config)
        fl_image = self._docker_image('flask', output_config)
        nx_image = self._docker_image('nginx', output_config)
        cwl_image = self._docker_image('cloudwatchlogs', output_config)
        self._set_cfn_boto_param_value(output_config, 'engine-taskrunner',
                                       'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'engine-worker',
                                       'EngineWorkerDockerImage', ew_image)
        self._set_cfn_boto_param_value(output_config, 'engine-worker',
                                       'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker',
                                       'SubscriptionWorkerDockerImage',
                                       sw_image)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker',
                                       'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker',
                                       'TriggerWorkerDockerImage', tw_image)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker',
                                       'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'FlaskWorkerDockerImage', fl_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'NginxWorkerDockerImage', nx_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'web',
                                       'FlaskWorkerDockerImage', fl_image)
        self._set_cfn_boto_param_value(output_config, 'web',
                                       'NginxWorkerDockerImage', nx_image)
        self._set_cfn_boto_param_value(output_config, 'web',
                                       'CloudWatchLogsDockerImage', cwl_image)

        _logger.info('updating configuration with DartConfig references')
        self._set_cfn_boto_param_value(output_config, 'engine-worker',
                                       'DartConfig',
                                       self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker',
                                       'DartConfig',
                                       self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker',
                                       'DartConfig',
                                       self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'web-internal',
                                       'DartConfig',
                                       self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'web', 'DartConfig',
                                       self.output_config_s3_path)
        eng_cfg['no_op_engine']['config'] = self.output_config_s3_path
        eng_cfg['emr_engine']['config'] = self.output_config_s3_path
        eng_cfg['dynamodb_engine']['config'] = self.output_config_s3_path
        eng_cfg['redshift_engine']['config'] = self.output_config_s3_path

        _logger.info('waiting for logs stack')
        logs_outputs = self._wait_for_stack_completion_and_get_outputs(
            logs_stack_name, 2)
        syslog_log_group_name = _get_element(logs_outputs, 'OutputKey',
                                             'DartSyslog')['OutputValue']
        misc_log_group_name = _get_element(logs_outputs, 'OutputKey',
                                           'DartMisc')['OutputValue']

        self._handle_docker_concerns(cwl_image, eng_cfg, misc_log_group_name,
                                     output_config, syslog_log_group_name)

        _logger.info('waiting for s3 stack')
        self._wait_for_stack_completion_and_get_outputs(s3_stack_name)

        self.create_partial(output_config)

        _logger.info('full environment created with config: %s, url: %s' %
                     (self.output_config_s3_path, dart_host))
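
The method above leans heavily on a small _get_element helper to pick a named entry out of a list of CloudFormation outputs or parameters. Its implementation is not part of the snippet; a minimal sketch consistent with the call sites (not necessarily the project's actual code) is:

    # Hypothetical stand-in for _get_element, inferred from call sites such as
    # _get_element(iam_outputs, 'OutputKey', 'EcsServiceRole')['OutputValue']:
    # return the first dict in `items` whose `key` field equals `value`.
    def _get_element(items, key, value):
        for item in items:
            if item.get(key) == value:
                return item
        raise ValueError('no element with %s == %s' % (key, value))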
Code Example #7
    def run(self):
        _logger.info('reading configuration...')
        output_config = copy.deepcopy(configuration(self.input_config_path, suppress_decryption=True))
        dart_host = _get_dart_host(output_config)
        _logger.info('setting up new dart full environment: %s' % dart_host)

        _logger.info('verifying s3 buckets do not exist')
        config_bucket_name = output_config['s3']['config_bucket']
        data_bucket_name = output_config['s3']['data_bucket']
        if 's3' not in self.stacks_to_skip:
            iequals = lambda s1, s2: s1.lower() == s2.lower()
            buckets = [b['Name'] for b in boto3.client('s3').list_buckets()['Buckets']]
            matches = [b for b in buckets if (iequals(b, config_bucket_name) or iequals(b, data_bucket_name))]
            if len(matches) > 0:
                raise Exception('s3 config and/or data bucket already exists!')

        _logger.info('creating initial stacks')
        aws_account_id = output_config['dart']['aws_account']
        replacements = {
            '{DART_REGION}': output_config['dart']['region'],
            '{DART_AWS_ACCOUNT}': aws_account_id,
            '{DART_QUEUE_PREFIX}': output_config['dart']['queue_prefix'],
            '{DART_CONFIG_BUCKET}': output_config['s3']['config_bucket'],
            '{DART_DATA_BUCKET}': output_config['s3']['data_bucket'],
        }
        iam_stack_name = self._create_stack('iam', output_config, replacements)
        sns_stack_name = self._create_stack('sns', output_config)

        _logger.info('waiting for stack completion')
        iam_outputs = self._wait_for_stack_completion_and_get_outputs(iam_stack_name, 7)
        sns_outputs = self._wait_for_stack_completion_and_get_outputs(sns_stack_name, 1)

        uds_inpf_role = _get_element(iam_outputs, 'OutputKey', 'UdsInstanceProfileRole')['OutputValue']
        uds_ec2_inpf = _get_element(iam_outputs, 'OutputKey', 'UdsEc2InstanceProfile')['OutputValue']
        uds_ec2_inpf_role = _get_element(iam_outputs, 'OutputKey', 'UdsEc2InstanceProfileRole')['OutputValue']
        ecs_container_inpf = _get_element(iam_outputs, 'OutputKey', 'EcsContainerInstanceProfile')['OutputValue']
        ecs_container_inpf_role = _get_element(iam_outputs, 'OutputKey', 'EcsContainerInstanceProfileRole')['OutputValue']
        ecs_service_role = _get_element(iam_outputs, 'OutputKey', 'EcsServiceRole')['OutputValue']
        sns_arn = sns_outputs[0]['OutputValue']

        _logger.info('updating configuration with sns arn')
        self._set_cfn_boto_param_value(output_config, 'logs', 'AlarmActions', sns_arn)

        _logger.info('updating configuration with subscription queue urls/arns')
        subscription_queue_arn, subscription_queue_url = self._ensure_queue_exists(output_config, 'subscription_queue')
        s3_params = output_config['cloudformation_stacks']['s3']['boto_args']['Parameters']
        _get_element(s3_params, 'ParameterKey', 'DartConfigBucket')['ParameterValue'] = config_bucket_name
        _get_element(s3_params, 'ParameterKey', 'DartDataBucket')['ParameterValue'] = data_bucket_name
        _get_element(s3_params, 'ParameterKey', 'SubscriptionQueueUrl')['ParameterValue'] = subscription_queue_url
        _get_element(s3_params, 'ParameterKey', 'SubscriptionQueueArn')['ParameterValue'] = subscription_queue_arn

        _logger.info('creating s3 and logs stacks')
        s3_stack_name = self._create_stack('s3', output_config)
        logs_stack_name = self._create_stack('logs', output_config)

        _logger.info('creating/updating kms key')
        with open(dart_root_relative_path(output_config['kms']['key_policy_template'])) as f:
            policy = json.load(f)
            kms_authorized_users = [self._role_arn(ecs_container_inpf_role, aws_account_id)]
            kms_authorized_users.extend(output_config['dart']['kms_key_user_arns'])
            policy['Statement'][0]['Principal']['AWS'] = 'arn:aws:iam::%s:root' % aws_account_id
            policy['Statement'][1]['Principal']['AWS'] = output_config['dart']['kms_key_admin_arns']
            policy['Statement'][2]['Principal']['AWS'] = kms_authorized_users
            policy['Statement'][3]['Principal']['AWS'] = kms_authorized_users
            policy_text = json.dumps(policy)
        kms_client = boto3.client('kms')
        key_arn = output_config['kms']['key_arn']
        if key_arn and key_arn != '...TBD...':
            kms_client.put_key_policy(KeyId=key_arn, PolicyName='default', Policy=policy_text)
        else:
            key_arn = kms_client.create_key(Policy=policy_text)['KeyMetadata']['Arn']
            alias = 'alias/dart-%s-secrets' % self.environment_name
            kms_client.create_alias(AliasName=alias, TargetKeyId=key_arn)

        _logger.info('updating configuration with kms key arn and secrets path, etc')
        output_config['engines']['redshift_engine']['options']['kms_key_arn'] = key_arn
        secrets_config = get_secrets_config(output_config)
        values = (config_bucket_name, self.environment_name)
        secrets_s3_path = 's3://%s/secrets/%s' % values
        secrets_config['secrets_s3_path'] = secrets_s3_path
        secrets_config['kms_key_arn'] = key_arn
        eng_cfg = output_config['engines']
        eng_cfg['redshift_engine']['options']['secrets_s3_path'] = secrets_s3_path
        output_config['dart']['s3_datastores_root'] = 's3://%s/datastores/%s' % values

        _logger.info('updating configuration with iam profiles/roles')
        output_config['engines']['emr_engine']['options']['instance_profile'] = uds_ec2_inpf
        output_config['engines']['emr_engine']['options']['service_role'] = uds_inpf_role
        output_config['engines']['dynamodb_engine']['options']['emr_instance_profile'] = uds_ec2_inpf
        output_config['engines']['dynamodb_engine']['options']['emr_service_role'] = uds_inpf_role
        self._set_cfn_boto_param_value(output_config, 'engine-taskrunner', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'engine-worker', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'WebEcsServiceRoleName', ecs_service_role)
        self._set_cfn_boto_param_value(output_config, 'web', 'IamInstanceProfile', ecs_container_inpf)
        self._set_cfn_boto_param_value(output_config, 'web', 'WebEcsServiceRoleName', ecs_service_role)

        _logger.info('creating ECR repos')
        ecr_client = boto3.client('ecr')
        all_repo_names = [self._full_repo_name(r, output_config) for r in output_config['ecr']['repo_names']]
        existing_repo_names = []
        for repo_name in all_repo_names:
            try:
                ecr_client.describe_repositories(repositoryNames=[repo_name])
                existing_repo_names.append(repo_name)
            except ClientError as e:
                if e.response['Error']['Code'] == 'RepositoryNotFoundException':
                    continue
                raise e
        missing_repo_names = set(all_repo_names) - set(existing_repo_names)
        with open(dart_root_relative_path(output_config['ecr']['policy_template'])) as f:
            initial_policy = json.load(f)
            initial_policy['Statement'][0]['Principal']['AWS'] = output_config['dart']['ecr_authorized_user_arns']
            initial_policy_text = json.dumps(initial_policy)
        for repo_name in missing_repo_names:
            ecr_client.create_repository(repositoryName=repo_name)
            ecr_client.set_repository_policy(repositoryName=repo_name, policyText=initial_policy_text)

        _logger.info('updating ECR repo policies')
        ecr_policy_statement_sid = 'dart-%s-ecs-and-uds-permissions' % self.environment_name
        ecs_container_inpf_role_arn = self._role_arn(ecs_container_inpf_role, aws_account_id)
        uds_ec2_inpf_role_arn = self._role_arn(uds_ec2_inpf_role, aws_account_id)
        for repo_name in all_repo_names:
            policy = json.loads(ecr_client.get_repository_policy(repositoryName=repo_name)['policyText'])
            exists_index = None
            for i, statement in enumerate(policy['Statement']):
                if statement['Sid'] == ecr_policy_statement_sid:
                    exists_index = i
            if exists_index is not None:  # compare to None so a match at index 0 is also replaced
                policy['Statement'].pop(exists_index)
            policy['Statement'].append({
                'Sid': ecr_policy_statement_sid,
                'Effect': 'Allow',
                'Principal': {'AWS': [ecs_container_inpf_role_arn, uds_ec2_inpf_role_arn]},
                'Action': [
                    'ecr:GetDownloadUrlForLayer',
                    'ecr:BatchGetImage',
                    'ecr:BatchCheckLayerAvailability',
                    'ecr:GetAuthorizationToken'
                ]
            })
            policy_text = json.dumps(policy)
            ecr_client.set_repository_policy(repositoryName=repo_name, policyText=policy_text)

        _logger.info('updating configuration with docker image references')
        output_config['local_setup']['elasticmq_docker_image'] = self._docker_image('elasticmq', output_config)
        eng_cfg['no_op_engine']['docker_image'] = self._docker_image('engine-no_op', output_config)
        eng_cfg['emr_engine']['docker_image'] = self._docker_image('engine-emr', output_config)
        eng_cfg['emr_engine']['options']['impala_docker_repo_base_url'] = self._ecr_base_url(output_config)
        eng_cfg['dynamodb_engine']['docker_image'] = self._docker_image('engine-dynamodb', output_config)
        eng_cfg['dynamodb_engine']['options']['emr_impala_docker_repo_base_url'] = self._ecr_base_url(output_config)
        eng_cfg['redshift_engine']['docker_image'] = self._docker_image('engine-redshift', output_config)
        ew_image = self._docker_image('engine-worker', output_config)
        sw_image = self._docker_image('subscription-worker', output_config)
        tw_image = self._docker_image('trigger-worker', output_config)
        fl_image = self._docker_image('flask', output_config)
        nx_image = self._docker_image('nginx', output_config)
        cwl_image = self._docker_image('cloudwatchlogs', output_config)
        self._set_cfn_boto_param_value(output_config, 'engine-taskrunner', 'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'engine-worker', 'EngineWorkerDockerImage', ew_image)
        self._set_cfn_boto_param_value(output_config, 'engine-worker', 'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker', 'SubscriptionWorkerDockerImage', sw_image)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker', 'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker', 'TriggerWorkerDockerImage', tw_image)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker', 'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'FlaskWorkerDockerImage', fl_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'NginxWorkerDockerImage', nx_image)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'CloudWatchLogsDockerImage', cwl_image)
        self._set_cfn_boto_param_value(output_config, 'web', 'FlaskWorkerDockerImage', fl_image)
        self._set_cfn_boto_param_value(output_config, 'web', 'NginxWorkerDockerImage', nx_image)
        self._set_cfn_boto_param_value(output_config, 'web', 'CloudWatchLogsDockerImage', cwl_image)

        _logger.info('updating configuration with DartConfig references')
        self._set_cfn_boto_param_value(output_config, 'engine-worker', 'DartConfig', self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'subscription-worker', 'DartConfig', self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'trigger-worker', 'DartConfig', self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'web-internal', 'DartConfig', self.output_config_s3_path)
        self._set_cfn_boto_param_value(output_config, 'web', 'DartConfig', self.output_config_s3_path)
        eng_cfg['no_op_engine']['config'] = self.output_config_s3_path
        eng_cfg['emr_engine']['config'] = self.output_config_s3_path
        eng_cfg['dynamodb_engine']['config'] = self.output_config_s3_path
        eng_cfg['redshift_engine']['config'] = self.output_config_s3_path

        _logger.info('waiting for logs stack')
        logs_outputs = self._wait_for_stack_completion_and_get_outputs(logs_stack_name, 2)
        syslog_log_group_name = _get_element(logs_outputs, 'OutputKey', 'DartSyslog')['OutputValue']
        misc_log_group_name = _get_element(logs_outputs, 'OutputKey', 'DartMisc')['OutputValue']

        self._handle_docker_concerns(cwl_image, eng_cfg, misc_log_group_name, output_config, syslog_log_group_name)

        _logger.info('waiting for s3 stack')
        self._wait_for_stack_completion_and_get_outputs(s3_stack_name)

        self.create_partial(output_config)

        _logger.info('full environment created with config: %s, url: %s' % (self.output_config_s3_path, dart_host))
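
The repeated _set_cfn_boto_param_value calls mirror the manual parameter updates done earlier in run() (look up an entry by ParameterKey, then overwrite its ParameterValue). A plausible sketch, assuming the same cloudformation_stacks layout and the _get_element helper sketched above, is:

    # Hypothetical sketch only; the project's real method may differ.
    def _set_cfn_boto_param_value(self, config, stack_name, parameter_key, value):
        params = config['cloudformation_stacks'][stack_name]['boto_args']['Parameters']
        _get_element(params, 'ParameterKey', parameter_key)['ParameterValue'] = value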
Code Example #8
    def create_partial(self, output_config):
        _logger.info('updating configuration with trigger queue urls/arns')
        trigger_queue_arn, trigger_queue_url = self._ensure_queue_exists(
            output_config, 'trigger_queue')
        events_params = output_config['cloudformation_stacks']['events'][
            'boto_args']['Parameters']
        _get_element(events_params, 'ParameterKey',
                     'TriggerQueueUrl')['ParameterValue'] = trigger_queue_url
        _get_element(events_params, 'ParameterKey',
                     'TriggerQueueArn')['ParameterValue'] = trigger_queue_arn

        _logger.info('creating initial stacks')
        events_stack_name = self._create_stack('events', self.mode,
                                               output_config)
        rds_stack_name = self._create_stack('rds', self.mode, output_config)
        elb_stack_name = self._create_stack('elb', self.mode, output_config)
        elb_int_stack_name = self._create_stack('elb-internal', self.mode,
                                                output_config)
        engine_taskrunner_stack_name = self._create_stack(
            'engine-taskrunner', self.mode, output_config)

        _logger.info('waiting for stack completion')
        events_outputs = self._wait_for_stack_completion_and_get_outputs(
            events_stack_name, 1)
        rds_outputs = self._wait_for_stack_completion_and_get_outputs(
            rds_stack_name, 1)
        elb_outputs = self._wait_for_stack_completion_and_get_outputs(
            elb_stack_name, 1)
        elb_int_outputs = self._wait_for_stack_completion_and_get_outputs(
            elb_int_stack_name, 1)
        engine_taskrunner_outputs = self._wait_for_stack_completion_and_get_outputs(
            engine_taskrunner_stack_name, 1)

        _logger.info(
            'updating configuration with new cloudwatch scheduled events sns topic name'
        )
        sns_arn = events_outputs[0]['OutputValue']
        output_config['triggers']['scheduled'][
            'cloudwatch_scheduled_events_sns_arn'] = sns_arn

        _logger.info(
            'updating configuration with new rds endpoint and password')
        db_uri_secret_key = 'database-uri-%s' % self.environment_name
        output_config['flask'][
            'SQLALCHEMY_DATABASE_URI'] = '!decrypt %s' % db_uri_secret_key
        secrets_config = get_secrets_config(output_config)
        secrets_service = Secrets(secrets_config['kms_key_arn'],
                                  secrets_config['secrets_s3_path'])
        rds_pwd = os.environ['DART_RDS_PASSWORD']
        rds_host = rds_outputs[0]['OutputValue']
        secrets_service.put(
            db_uri_secret_key,
            'postgresql://*****:*****@%s:5432/dart' % (rds_pwd, rds_host))

        _logger.info('updating configuration with new elb name')
        web_params = output_config['cloudformation_stacks']['web'][
            'boto_args']['Parameters']
        elb_name_param = _get_element(web_params, 'ParameterKey',
                                      'WebEcsServiceLoadBalancerName')
        elb_name = elb_outputs[0]['OutputValue']
        elb_name_param['ParameterValue'] = elb_name

        _logger.info('updating configuration with new internal elb name')
        web_int_params = output_config['cloudformation_stacks'][
            'web-internal']['boto_args']['Parameters']
        elb_int_name_param = _get_element(web_int_params, 'ParameterKey',
                                          'WebEcsServiceLoadBalancerName')
        elb_int_name = elb_int_outputs[0]['OutputValue']
        elb_int_name_param['ParameterValue'] = elb_int_name

        _logger.info(
            'updating configuration with new engine taskrunner ecs cluster name'
        )
        output_config['dart'][
            'engine_taskrunner_ecs_cluster'] = engine_taskrunner_outputs[0][
                'OutputValue']

        _logger.info(
            'updating configuration with encrypted dart email username/password'
        )
        mailer_options = output_config['email']['mailer']
        mailer_options['usr'] = '******'
        mailer_options['pwd'] = '!decrypt email-password'
        secrets_service.put('email-username', self.dart_email_username)
        secrets_service.put('email-password', self.dart_email_password)

        _logger.info('uploading the output configuration to s3')
        body = yaml.dump(output_config, default_flow_style=False)
        body = re.sub(r"'!decrypt (.+?)'", r"!decrypt \1", body)
        body = re.sub(r"'!env (.+?)'", r"!env \1", body)
        body = re.sub(r"__DARTBANG__", r"!", body)
        body = re.sub(r"__DARTQUOTE__", r"'", body)
        body = re.sub(r"__DARTDOLLAR__", r"$", body)
        boto3.client('s3').put_object(
            Bucket=get_bucket_name(self.output_config_s3_path),
            Key=get_key_name(self.output_config_s3_path),
            Body=body)

        _logger.info('creating and waiting for web stacks')
        web_stack_name = self._create_stack('web', self.mode, output_config)
        web_internal_stack_name = self._create_stack('web-internal', self.mode,
                                                     output_config)
        web_outputs = self._wait_for_stack_completion_and_get_outputs(
            web_stack_name, 2)
        self._wait_for_stack_completion_and_get_outputs(
            web_internal_stack_name)

        _logger.info('waiting for web ecs service to stabilize')
        cluster_name = _get_element(web_outputs, 'OutputKey',
                                    'EcsClusterResourceName')['OutputValue']
        service_name = _get_element(web_outputs, 'OutputKey',
                                    'WebEcsServiceResourceName')['OutputValue']
        boto3.client('ecs').get_waiter('services_stable').wait(
            cluster=cluster_name, services=[service_name])
        _logger.info('done')

        _logger.info('waiting for web app to attach to load balancer')
        self._wait_for_web_app(elb_name)
        time.sleep(5)

        _logger.info('initializing database schema')
        dart_host = _get_dart_host(output_config)
        response = requests.post('http://%s/admin/create_all' % dart_host)
        response.raise_for_status()
        time.sleep(5)

        _logger.info('creating database triggers')
        with open(dart_root_relative_path('src', 'database',
                                          'triggers.sql')) as f:
            engine = sqlalchemy.create_engine(
                'postgresql://*****:*****@%s:5432/dart' % (rds_pwd, rds_host))
            engine.execute(f.read())
        _logger.info('done')
        time.sleep(5)

        _logger.info('adding engines')
        self._with_retries(add_no_op_engine, output_config)
        self._with_retries(add_no_op_engine_sub_graphs, output_config)
        self._with_retries(add_emr_engine, output_config)
        self._with_retries(add_emr_engine_sub_graphs, output_config)
        self._with_retries(add_dynamodb_engine, output_config)
        self._with_retries(add_redshift_engine, output_config)
        self._with_retries(add_s3_engine, output_config)

        _logger.info('creating and waiting for remaining stacks')
        engine_worker_stack_name = self._create_stack('engine-worker',
                                                      self.mode, output_config)
        trigger_worker_stack_name = self._create_stack('trigger-worker',
                                                       self.mode,
                                                       output_config)
        subscription_worker_stack_name = self._create_stack(
            'subscription-worker', self.mode, output_config)
        self._wait_for_stack_completion_and_get_outputs(
            engine_worker_stack_name)
        self._wait_for_stack_completion_and_get_outputs(
            trigger_worker_stack_name)
        self._wait_for_stack_completion_and_get_outputs(
            subscription_worker_stack_name)
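
The engine registration steps are wrapped in self._with_retries(...), whose implementation is outside this snippet. A minimal sketch of such a wrapper, with an assumed attempt count and delay, could look like:

    # Hypothetical retry wrapper; the attempt count and sleep interval are assumptions.
    def _with_retries(self, fn, *args):
        for attempt in range(1, 4):
            try:
                return fn(*args)
            except Exception:
                if attempt == 3:
                    raise
                _logger.info('%s failed (attempt %s of 3), retrying...' % (fn.__name__, attempt))
                time.sleep(5)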
Code Example #9
    def create_partial(self, output_config):
        _logger.info('updating configuration with trigger queue urls/arns')
        trigger_queue_arn, trigger_queue_url = self._ensure_queue_exists(output_config, 'trigger_queue')
        events_params = output_config['cloudformation_stacks']['events']['boto_args']['Parameters']
        _get_element(events_params, 'ParameterKey', 'TriggerQueueUrl')['ParameterValue'] = trigger_queue_url
        _get_element(events_params, 'ParameterKey', 'TriggerQueueArn')['ParameterValue'] = trigger_queue_arn

        _logger.info('creating initial stacks')
        events_stack_name = self._create_stack('events', self.mode, output_config)
        rds_stack_name = self._create_stack('rds', self.mode, output_config)
        elb_stack_name = self._create_stack('elb', self.mode, output_config)
        elb_int_stack_name = self._create_stack('elb-internal', self.mode, output_config)
        engine_taskrunner_stack_name = self._create_stack('engine-taskrunner', self.mode, output_config)

        _logger.info('waiting for stack completion')
        events_outputs = self._wait_for_stack_completion_and_get_outputs(events_stack_name, 1)
        rds_outputs = self._wait_for_stack_completion_and_get_outputs(rds_stack_name, 1)
        elb_outputs = self._wait_for_stack_completion_and_get_outputs(elb_stack_name, 1)
        elb_int_outputs = self._wait_for_stack_completion_and_get_outputs(elb_int_stack_name, 1)
        engine_taskrunner_outputs = self._wait_for_stack_completion_and_get_outputs(engine_taskrunner_stack_name, 1)

        _logger.info('updating configuration with new cloudwatch scheduled events sns topic name')
        sns_arn = events_outputs[0]['OutputValue']
        output_config['triggers']['scheduled']['cloudwatch_scheduled_events_sns_arn'] = sns_arn

        _logger.info('updating configuration with new rds endpoint and password')
        db_uri_secret_key = 'database-uri-%s' % self.environment_name
        output_config['flask']['SQLALCHEMY_DATABASE_URI'] = '!decrypt %s' % db_uri_secret_key
        secrets_config = get_secrets_config(output_config)
        secrets_service = Secrets(secrets_config['kms_key_arn'], secrets_config['secrets_s3_path'])
        rds_pwd = os.environ['DART_RDS_PASSWORD']
        rds_host = rds_outputs[0]['OutputValue']
        secrets_service.put(db_uri_secret_key, 'postgresql://*****:*****@%s:5432/dart' % (rds_pwd, rds_host))

        _logger.info('updating configuration with new elb name')
        web_params = output_config['cloudformation_stacks']['web']['boto_args']['Parameters']
        elb_name_param = _get_element(web_params, 'ParameterKey', 'WebEcsServiceLoadBalancerName')
        elb_name = elb_outputs[0]['OutputValue']
        elb_name_param['ParameterValue'] = elb_name

        _logger.info('updating configuration with new internal elb name')
        web_int_params = output_config['cloudformation_stacks']['web-internal']['boto_args']['Parameters']
        elb_int_name_param = _get_element(web_int_params, 'ParameterKey', 'WebEcsServiceLoadBalancerName')
        elb_int_name = elb_int_outputs[0]['OutputValue']
        elb_int_name_param['ParameterValue'] = elb_int_name

        _logger.info('updating configuration with new engine taskrunner ecs cluster name')
        output_config['dart']['engine_taskrunner_ecs_cluster'] = engine_taskrunner_outputs[0]['OutputValue']

        _logger.info('updating configuration with encrypted dart email username/password')
        mailer_options = output_config['email']['mailer']
        mailer_options['usr'] = '******'
        mailer_options['pwd'] = '!decrypt email-password'
        secrets_service.put('email-username', self.dart_email_username)
        secrets_service.put('email-password', self.dart_email_password)

        _logger.info('uploading the output configuration to s3')
        body = yaml.dump(output_config, default_flow_style=False)
        body = re.sub(r"'!decrypt (.+?)'", r"!decrypt \1", body)
        body = re.sub(r"'!env (.+?)'", r"!env \1", body)
        body = re.sub(r"__DARTBANG__", r"!", body)
        body = re.sub(r"__DARTQUOTE__", r"'", body)
        body = re.sub(r"__DARTDOLLAR__", r"$", body)
        boto3.client('s3').put_object(
            Bucket=get_bucket_name(self.output_config_s3_path),
            Key=get_key_name(self.output_config_s3_path),
            Body=body
        )

        _logger.info('creating and waiting for web stacks')
        web_stack_name = self._create_stack('web', self.mode, output_config)
        web_internal_stack_name = self._create_stack('web-internal', self.mode, output_config)
        web_outputs = self._wait_for_stack_completion_and_get_outputs(web_stack_name, 2)
        self._wait_for_stack_completion_and_get_outputs(web_internal_stack_name)

        _logger.info('waiting for web ecs service to stabilize')
        cluster_name = _get_element(web_outputs, 'OutputKey', 'EcsClusterResourceName')['OutputValue']
        service_name = _get_element(web_outputs, 'OutputKey', 'WebEcsServiceResourceName')['OutputValue']
        boto3.client('ecs').get_waiter('services_stable').wait(cluster=cluster_name, services=[service_name])
        _logger.info('done')

        _logger.info('waiting for web app to attach to load balancer')
        self._wait_for_web_app(elb_name)
        time.sleep(5)

        _logger.info('initializing database schema')
        dart_host = _get_dart_host(output_config)
        response = requests.post('http://%s/admin/create_all' % dart_host)
        response.raise_for_status()
        time.sleep(5)

        _logger.info('creating database triggers')
        with open(dart_root_relative_path('src', 'database', 'triggers.sql')) as f:
            engine = sqlalchemy.create_engine('postgresql://*****:*****@%s:5432/dart' % (rds_pwd, rds_host))
            engine.execute(f.read())
        _logger.info('done')
        time.sleep(5)

        _logger.info('adding engines')
        self._with_retries(add_no_op_engine, output_config)
        self._with_retries(add_no_op_engine_sub_graphs, output_config)
        self._with_retries(add_emr_engine, output_config)
        self._with_retries(add_emr_engine_sub_graphs, output_config)
        self._with_retries(add_dynamodb_engine, output_config)
        self._with_retries(add_redshift_engine, output_config)
        self._with_retries(add_s3_engine, output_config)

        _logger.info('creating and waiting for remaining stacks')
        engine_worker_stack_name = self._create_stack('engine-worker', self.mode, output_config)
        trigger_worker_stack_name = self._create_stack('trigger-worker', self.mode, output_config)
        subscription_worker_stack_name = self._create_stack('subscription-worker', self.mode, output_config)
        self._wait_for_stack_completion_and_get_outputs(engine_worker_stack_name)
        self._wait_for_stack_completion_and_get_outputs(trigger_worker_stack_name)
        self._wait_for_stack_completion_and_get_outputs(subscription_worker_stack_name)
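
One subtle step shared by both create_partial listings is the post-processing of the dumped YAML: PyYAML quotes any scalar that starts with '!', so the regex substitutions restore the custom !decrypt / !env tags before the configuration is uploaded to S3. A standalone illustration with a made-up secret key name:

    import re
    import yaml

    config = {'flask': {'SQLALCHEMY_DATABASE_URI': '!decrypt database-uri-stg'}}
    body = yaml.dump(config, default_flow_style=False)
    # yaml.dump quotes the tag-like string:
    #   flask:
    #     SQLALCHEMY_DATABASE_URI: '!decrypt database-uri-stg'
    body = re.sub(r"'!decrypt (.+?)'", r"!decrypt \1", body)
    body = re.sub(r"'!env (.+?)'", r"!env \1", body)
    print(body)
    #   flask:
    #     SQLALCHEMY_DATABASE_URI: !decrypt database-uri-stg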