Example #1
def test_s3_upload(cleanup_buckets, awsclient):
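    # verify that _s3_upload puts the generated template at the expected
    # kumo/<region>/<stack name>-cloudformation.json key in the artifact bucket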
    #upload_conf = ConfigFactory.parse_file(
    #    here('resources/simple_cloudformation_stack/settings_upload_dev.conf')
    #)

    upload_conf = {
        'stack': {
            'StackName': "infra-dev-kumo-sample-stack",
            'artifactBucket': "unittest-kumo-artifact-bucket"
        },
        'parameters': {
            'InstanceType': "t2.micro"
        }
    }

    region = awsclient.get_client('s3').meta.region_name
    account = os.getenv('ACCOUNT', None)
    # add account prefix to artifact bucket config
    if account:
        upload_conf['stack']['artifactBucket'] = \
            '%s-unittest-kumo-artifact-bucket' % account

    artifact_bucket = _get_artifact_bucket(upload_conf)
    prepare_artifacts_bucket(awsclient, artifact_bucket)
    cleanup_buckets.append(artifact_bucket)
    dest_key = 'kumo/%s/%s-cloudformation.json' % (
        region, _get_stack_name(upload_conf))
    expected_s3url = 'https://s3-%s.amazonaws.com/%s/%s' % (
        region, artifact_bucket, dest_key)
    cloudformation_simple_stack, _ = load_cloudformation_template(
        here('resources/simple_cloudformation_stack/cloudformation.py'))
    actual_s3url = _s3_upload(
        awsclient, upload_conf,
        generate_template({}, upload_conf, cloudformation_simple_stack))
    assert expected_s3url == actual_s3url
Example #2
def deploy_cmd(**tooldata):
    context = tooldata.get('context')
    config = tooldata.get('config')
    awsclient = context.get('_awsclient')
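    # deploy the bundle in context['_bundle_file'] via AWS CodeDeploy, using
    # the configured artifacts bucket; failures map to exit code 1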

    prepare_artifacts_bucket(awsclient,
                             config['codedeploy'].get('artifactsBucket'))
    # TODO deprecate prebundle hook with reference to new signal-based-hooks
    # TODO and move them to glomex_bundler
    pre_bundle_scripts = config.get('preBundle', None)
    if pre_bundle_scripts:
        exit_code = utils.execute_scripts(pre_bundle_scripts)
        if exit_code != 0:
            print('Pre bundle script exited with error')
            return 1

    bucket = config['codedeploy'].get('artifactsBucket')

    deployment = deploy(
        awsclient=awsclient,
        applicationName=config['codedeploy'].get('applicationName'),
        deploymentGroupName=config['codedeploy'].get('deploymentGroupName'),
        deploymentConfigName=config['codedeploy'].get('deploymentConfigName'),
        bucket=bucket,
        bundlefile=context['_bundle_file']
    )

    exit_code = deployment_status(awsclient, deployment)
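    # a non-zero deployment status aborts with exit code 1; on success this
    # variant falls through and implicitly returns None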
    if exit_code:
        return 1
Example #3
def deploy_cmd(**tooldata):
    context = tooldata.get('context')
    config = tooldata.get('config')
    awsclient = context.get('_awsclient')
    # in case we fail we limit log output to after start_time
    start_time = maya.now().datetime(naive=True)
    log_group = config.get('deployment',
                           {}).get('LogGroup',
                                   DEFAULT_CONFIG['tenkai']['log_group'])

    prepare_artifacts_bucket(awsclient,
                             config['codedeploy'].get('artifactsBucket'))
    # TODO deprecate prebundle hook with reference to new signal-based-hooks
    # TODO and move them to glomex_bundler
    pre_bundle_scripts = config.get('preBundle', None)
    if pre_bundle_scripts:
        exit_code = utils.execute_scripts(pre_bundle_scripts)
        if exit_code != 0:
            log.error('Pre bundle script exited with error')
            return 1

    bucket = config['codedeploy'].get('artifactsBucket')

    try:
        deployment = deploy(
            awsclient=awsclient,
            applicationName=config['codedeploy'].get('applicationName'),
            deploymentGroupName=config['codedeploy'].get(
                'deploymentGroupName'),
            deploymentConfigName=config['codedeploy'].get(
                'deploymentConfigName'),
            bucket=bucket,
            bundlefile=context['_bundle_file'])

        exit_code = output_deployment_status(awsclient, deployment)
        output_deployment_summary(awsclient, deployment)
        output_deployment_diagnostics(awsclient, deployment, log_group,
                                      start_time)
        if exit_code:
            return 1

    except GracefulExit as e:
        log.warn('Received %s signal - stopping tenkai deployment' % str(e))
        stop_deployment(awsclient, deployment)
        exit_code = 1

    # remove bundle file
    if context['_bundle_file'] and os.path.exists(context['_bundle_file']):
        os.unlink(context['_bundle_file'])

    return exit_code
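
The GracefulExit branch above assumes that process signals (for example SIGTERM or SIGINT) are translated into an exception by the tool's signal handling. A minimal sketch of that idea; apart from the GracefulExit name, every identifier here is an assumption:

import signal

class GracefulExit(Exception):
    """Raised from a signal handler so deploy_cmd can stop the running deployment."""

def _signal_handler(signum, frame):
    # hand the signal number to the except block in deploy_cmd
    raise GracefulExit(signum)

# hypothetical wiring; the actual gcdt/tenkai setup may differ
signal.signal(signal.SIGTERM, _signal_handler)
signal.signal(signal.SIGINT, _signal_handler)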
Example #4
def test_s3_upload(cleanup_buckets, awsclient):
    upload_conf = ConfigFactory.parse_file(
        here('resources/simple_cloudformation_stack/settings_upload_dev.conf'))

    region = awsclient.get_client('s3').meta.region_name
    account = os.getenv('ACCOUNT', None)
    # add account prefix to artifact bucket config
    if account:
        upload_conf['cloudformation']['artifactBucket'] = \
            '%s-unittest-kumo-artifact-bucket' % account

    artifact_bucket = _get_artifact_bucket(upload_conf)
    prepare_artifacts_bucket(awsclient, artifact_bucket)
    cleanup_buckets.append(artifact_bucket)
    dest_key = 'kumo/%s/%s-cloudformation.json' % (
        region, _get_stack_name(upload_conf))
    expected_s3url = 'https://s3-%s.amazonaws.com/%s/%s' % (
        region, artifact_bucket, dest_key)
    cloudformation_simple_stack, _ = load_cloudformation_template(
        here('resources/simple_cloudformation_stack/cloudformation.py'))
    actual_s3url = _s3_upload(awsclient, upload_conf,
                              cloudformation_simple_stack)
    assert expected_s3url == actual_s3url
Example #5
def test_create_update_stack_artifactbucket(awsclient, temp_cloudformation_policy,
                                             cleanup_roles, cleanup_buckets):
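    # exercises the full stack lifecycle: template upload, create, update
    # (adding a RoleARN), and delete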
    # create a stack we use for the test lifecycle
    cloudformation_simple_stack, _ = load_cloudformation_template(
        here('resources/simple_cloudformation_stack/cloudformation.py')
    )

    upload_conf = {
        'stack': {
            'StackName': "infra-dev-kumo-sample-stack",
            'artifactBucket': "unittest-kumo-artifact-bucket"
        },
        'parameters': {
            'InstanceType': "t2.micro",
        }
    }

    region = awsclient.get_client('s3').meta.region_name
    account = os.getenv('ACCOUNT', None)
    # add account prefix to artifact bucket config
    if account:
        upload_conf['stack']['artifactBucket'] = \
            '%s-unittest-kumo-artifact-bucket' % account

    artifact_bucket = _get_artifact_bucket(upload_conf)
    prepare_artifacts_bucket(awsclient, artifact_bucket)
    cleanup_buckets.append(artifact_bucket)
    dest_key = 'kumo/%s/%s-cloudformation.json' % (region,
                                                   _get_stack_name(upload_conf))
    expected_s3url = 'https://s3-%s.amazonaws.com/%s/%s' % (region,
                                                            artifact_bucket,
                                                            dest_key)
    actual_s3url = _s3_upload(awsclient, upload_conf,
                              generate_template({}, upload_conf, cloudformation_simple_stack))
    assert expected_s3url == actual_s3url

    # create role to use for cloudformation update
    role = create_role_helper(
        awsclient,
        'unittest_%s_kumo' % utils.random_string(),
        policies=[
            temp_cloudformation_policy,
            'arn:aws:iam::aws:policy/AWSCodeDeployReadOnlyAccess',
            'arn:aws:iam::aws:policy/AmazonS3FullAccess'
        ],
        principal_service=['cloudformation.amazonaws.com']
    )
    cleanup_roles.append(role['RoleName'])

    # create
    exit_code = deploy_stack(awsclient, {}, upload_conf,
                             cloudformation_simple_stack,
                             override_stack_policy=False)
    assert exit_code == 0
    stack_id = get_stack_id(awsclient, upload_conf['stack']['StackName'])
    wait_for_stack_create_complete(awsclient, stack_id)

    # update (as a change we add the RoleARN)
    upload_conf['stack']['RoleARN'] = role['Arn']

    # update the stack
    changed = get_parameter_diff(awsclient, upload_conf)
    assert not changed
    exit_code = deploy_stack(awsclient, {}, upload_conf,
                             cloudformation_simple_stack,
                             override_stack_policy=False)
    assert exit_code == 0
    wait_for_stack_update_complete(awsclient, stack_id)

    # cleanup
    exit_code = delete_stack(awsclient, upload_conf)
    assert exit_code == 0
    wait_for_stack_delete_complete(awsclient, stack_id)
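
All of the kumo examples above append bucket names to a cleanup_buckets fixture (Example #5 also uses cleanup_roles) so the test resources are removed afterwards; the fixture itself is not shown. A minimal sketch of what such a fixture could look like, reusing the awsclient wrapper from the examples (the body is an assumption, not gcdt's actual fixture):

import pytest

@pytest.fixture
def cleanup_buckets(awsclient):
    # tests append bucket names here; everything is deleted after the test
    buckets = []
    yield buckets
    s3 = awsclient.get_client('s3')
    for name in buckets:
        # a bucket has to be emptied before it can be deleted
        paginator = s3.get_paginator('list_objects_v2')
        for page in paginator.paginate(Bucket=name):
            for obj in page.get('Contents', []):
                s3.delete_object(Bucket=name, Key=obj['Key'])
        s3.delete_bucket(Bucket=name)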