Example #1
def check_remote():
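    """
    Before an 'apply', checks that the local HEAD matches the upstream
    branch, aborting if there is no upstream or the branch is out of date.
    """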

    if cmd != 'apply':
        return

    log.ok('git: checking remote')
    subprocess.check_output(['git', 'remote', 'update'])
    local = subprocess.check_output(['git', 'rev-parse',
                                     'HEAD']).rstrip().decode('utf-8')

    try:
        remote = subprocess.check_output(
            ['git', 'rev-parse', '@{upstream}'],
            stderr=subprocess.STDOUT,
        ).rstrip().decode('utf-8')
    except subprocess.CalledProcessError as error:
        # There may not be any upstream configured for the branch.
        output = error.output.rstrip().decode('utf-8')
        log.bad('git: {}', output)
        abort()
    else:
        if local != remote:
            status = subprocess.check_output(
                ['git', 'status']).rstrip().decode('utf-8')
            for line in status.splitlines():
                if 'branch' in line and ('ahead' in line or 'behind' in line):
                    log.bad('git: {}', line.lower())
                    break
            else:
                log.bad('git: out of date')
            abort()
Example #2
def abort():
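    """
    Refuses to continue unless git checks are bypassed with GIT_CHECK=0.
    """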
    if os.environ.get('GIT_CHECK') != '0':
        log.bad('git: refusing to continue')
        log.ok('hint: set GIT_CHECK=0 to bypass git checks')
        sys.exit(1)
Example #3
def create():
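    """
    Creates a .jinjaformrc file in the current directory, making it the
    project root. Returns a shell exit code.
    """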
    path = os.path.join(cwd, '.jinjaformrc')
    try:
        with open(path, 'x') as open_file:
            open_file.write(default)
    except FileExistsError:
        log.bad('.jinjaformrc file already exists')
        return 1
    else:
        log.ok('created {}', path)
        log.ok('your project root directory is {}', cwd)
        return 0
Example #4
def credentials_setup():
    """
    Sets up AWS credentials using Terraform AWS provider blocks.

    """

    profile = aws_provider.get('profile')
    if not profile:
        return

    log.ok('aws-profile: {}', profile)

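    # Attach the AWS CLI's credential cache (~/.aws/cli/cache) to the
    # assume-role provider so cached credentials are shared with the CLI.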
    botocore_session = botocore.session.Session(profile=profile)
    cli_cache_path = os.path.join(os.path.expanduser('~'), '.aws/cli/cache')
    cli_cache = botocore.credentials.JSONFileCache(cli_cache_path)
    botocore_session.get_component('credential_provider').get_provider(
        'assume-role').cache = cli_cache
    session = boto3.Session(botocore_session=botocore_session)

    sessions[profile] = session

    try:
        creds = session.get_credentials().get_frozen_credentials()
    except KeyboardInterrupt:
        print()
        log.bad('aborted')
        sys.exit(1)

    env_vars = {
        'AWS_PROFILE': profile,
        'AWS_DEFAULT_PROFILE': profile,
        'AWS_ACCESS_KEY_ID': creds.access_key,
        'AWS_SECRET_ACCESS_KEY': creds.secret_key,
        'AWS_SESSION_TOKEN': creds.token,
    }
    for key, value in env_vars.items():
        if value:
            env[key] = value
Example #5
def main():
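    """
    Entry point. Handles the 'create' and 'version' commands directly,
    then runs the commands from the project's .jinjaformrc file (git
    checks, workspace creation, shell commands, Terraform) or falls
    through to running Terraform directly.
    """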

    if cmd == 'create':
        sys.exit(rc.create())

    if cmd in ('version', '-v', '-version', '--version'):
        log.ok('version: {}'.format(__version__))

    workspace_required = False

    if cmd:

        if cmd in commands_forbidden:
            log.bad('{} is disabled in jinjaform', cmd)
            sys.exit(1)

        if cmd not in commands_bypassed:

            if not project_root:
                log.bad(
                    'could not find .jinjaformrc file in current or parent directories'
                )
                log.bad(
                    'to start a new jinjaform project in the current directory, run "jinjaform create"'
                )
                sys.exit(1)

            workspace_required = True

    if workspace_required:

        if cwd == project_root:
            log.bad(
                'cannot run from the jinjaform project root directory, aborting'
            )
            sys.exit(1)

        for rc_cmd, rc_arg in rc.read():

            if rc_cmd == 'GIT_CHECK_BRANCH':

                git.check_branch(desired=rc_arg)

            elif rc_cmd == 'GIT_CHECK_CLEAN':

                git.check_clean()

            elif rc_cmd == 'GIT_CHECK_REMOTE':

                git.check_remote()

            elif rc_cmd == 'RUN':

                log.ok('run: {}'.format(rc_arg))
                returncode = subprocess.call(rc_arg, env=env, shell=True)
                if returncode != 0:
                    sys.exit(returncode)

            elif rc_cmd == 'TERRAFORM_RUN':

                log.ok('run: terraform')
                os.chdir(workspace_dir)
                returncode = terraform.execute(terraform_bin, args, env)
                if returncode != 0:
                    sys.exit(returncode)

            elif rc_cmd == 'WORKSPACE_CREATE':

                workspace.clean()
                workspace.create()

                aws.credentials_setup()

                if cmd == 'init':
                    aws.backend_setup()

            else:

                log.bad('configuration: {} is not a valid command', rc_cmd)
                sys.exit(1)

    else:

        log.ok('run: terraform')
        returncode = terraform.execute(terraform_bin, args, env)
        if returncode != 0:
            sys.exit(returncode)
Example #6
def backend_setup():
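    """
    Ensures the S3 bucket and DynamoDB lock table used by the Terraform
    S3 backend exist, prompting before creating any missing resources.
    """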

    region = s3_backend.get('region')
    if not region:
        return

    bucket = s3_backend.get('bucket')
    if bucket:

        log.ok('backend: s3://{} in {}', bucket, region)

        s3_client = get_default_session().client('s3')

        try:
            response = s3_client.get_bucket_versioning(Bucket=bucket)
        except s3_client.exceptions.NoSuchBucket:
            bucket_exists = False
            bucket_versioning = False
        else:
            bucket_exists = True
            # 'Status' is absent if versioning has never been configured.
            bucket_versioning = response.get('Status') == 'Enabled'

        if not bucket_exists:

            if not log.accept('backend: create s3://{} in {}', bucket, region):
                log.bad('backend: bucket not created')
                sys.exit(1)

            log.ok('backend: creating bucket')
            s3_client.create_bucket(
                ACL='private',
                Bucket=bucket,
                CreateBucketConfiguration={
                    'LocationConstraint': region,
                },
            )
            s3_client.get_waiter('bucket_exists').wait(Bucket=bucket)

        if not bucket_versioning:
            log.ok('backend: enabling versioning')
            s3_client.put_bucket_versioning(
                Bucket=bucket,
                VersioningConfiguration={
                    'Status': 'Enabled',
                },
            )

    dynamodb_table = s3_backend.get('dynamodb_table')
    if dynamodb_table:

        log.ok('backend: dynamodb://{} in {}', dynamodb_table, region)

        dynamodb_client = get_default_session().client('dynamodb')

        try:
            dynamodb_client.describe_table(TableName=dynamodb_table)
        except dynamodb_client.exceptions.ResourceNotFoundException:

            if not log.accept('backend: create dynamodb://{} in {}',
                              dynamodb_table, region):
                log.bad('backend: table not created')
                sys.exit(1)

            log.ok('backend: creating table')
            dynamodb_client.create_table(
                TableName=dynamodb_table,
                AttributeDefinitions=[{
                    'AttributeName': 'LockID',
                    'AttributeType': 'S',
                }],
                KeySchema=[
                    {
                        'AttributeName': 'LockID',
                        'KeyType': 'HASH',
                    },
                ],
                BillingMode='PAY_PER_REQUEST',
            )
Example #7
def _populate():
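    """
    Populates the workspace by walking from the current directory up to
    the project root, combining .tfvars files, rendering .tf files as
    templates, and copying any other files.
    """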

    # Create a template renderer that can handle multiple files
    # with variable references between files.
    template_renderer = MultiTemplateRenderer()

    # Discover files to create in the workspace. Files in multiple
    # levels of the project directory tree with the same name will
    # be combined into a single file in the workspace.

    tfvars_files = defaultdict(set)
    tf_files = defaultdict(set)
    other_files = defaultdict(set)

    current = cwd
    while (current + '/').startswith(project_root + '/'):
        for name in sorted(os.listdir(current)):
            if name.startswith('.'):
                continue
            path = os.path.join(current, name)
            if os.path.isdir(path):
                continue
            name = name.lower()
            if name.endswith('.tfvars'):
                tfvars_files[name].add(path)
            elif name.endswith('.tf'):
                tf_files[name].add(path)
            else:
                other_files[name].add(path)
        current = os.path.dirname(current)

    # Process .tfvars files first, and read their variable values,
    # because they are required when rendering .tf files.

    for name in sorted(tfvars_files):

        source_paths = sorted(tfvars_files[name])
        target_path = os.path.join(workspace_dir, name)

        if len(source_paths) == 1:
            log.ok('copy: {}', name)
        else:
            log.ok('combine: {}', name)

        with open(target_path, 'w') as output_file:

            for source_path in source_paths:

                with open(source_path) as source_file:
                    source_file_contents = source_file.read()

                relative_source_path = os.path.relpath(source_path, project_root)
                output_file.write('# jinjaform: {}'.format(relative_source_path))
                output_file.write('\n\n')
                output_file.write(source_file_contents)
                output_file.write('\n')

                if name == 'terraform.tfvars':
                    for key, value in hcl.loads(source_file_contents).items():
                        template_renderer.set_variable_value(key, value)

    # Process .tf files as templates.

    for name in sorted(tf_files):

        source_paths = sorted(tf_files[name])

        log.ok('render: {}', name)

        for source_path in source_paths:
            template_renderer.add_template(source_path)

    success, rendered = template_renderer.start()
    if not success:
        sys.exit(1)

    for name in sorted(tf_files):

        source_paths = sorted(tf_files[name])
        target_path = os.path.join(workspace_dir, name)

        with open(target_path, 'w') as output_file:

            for source_path in source_paths:

                relative_source_path = os.path.relpath(source_path, project_root)
                output_file.write('# jinjaform: {}'.format(relative_source_path))
                output_file.write('\n\n')
                output_file.write(rendered[source_path])
                output_file.write('\n')

    # Process remaining files. Do not add source comments because
    # the file format is unknown (e.g. json files would break with #).
    for name in sorted(other_files):

        source_paths = sorted(other_files[name])
        target_path = os.path.join(workspace_dir, name)

        if len(source_paths) == 1:
            log.ok('copy: {}', name)
        else:
            log.ok('combine: {}', name)

        with open(target_path, 'w') as output_file:
            for source_path in source_paths:
                with open(source_path) as source_file:
                    output_file.write(source_file.read())