def terraform_cd(**kwargs):
    """
    Publish Terraform module in S3 bucket.

    Expected kwargs:
        module_name: Name of the Terraform module; used both as the archive
            name prefix and as the S3 key prefix.
        module_version: Git tag to archive and publish.
        bucket: Destination S3 bucket name.
        env_file: Path to the environment config consumed by
            ``setup_environment()``.
        aws_assume_role_arn: IAM role to assume for the upload.
        debug: Enable verbose logging when True.

    Exits with status 1 if the git archive cannot be created or the S3
    upload fails.
    """
    module_name = kwargs["module_name"]
    tag = kwargs["module_version"]
    release_archive = "{project}-{tag}.tar.gz".format(project=module_name, tag=tag)
    bucket = kwargs["bucket"]
    setup_logging(LOG, debug=kwargs["debug"])
    with TemporaryDirectory() as tmp_dir:
        archive_path = osp.join(tmp_dir, release_archive)
        with open(archive_path, "wb") as archive_descriptor:
            proc = Popen(
                [
                    "git",
                    "archive",
                    "--format=tar.gz",
                    "--prefix={project}-{tag}/".format(project=module_name, tag=tag),
                    tag,
                ],
                stdout=archive_descriptor,
            )
            proc.communicate()
        # Fix: previously a failed "git archive" (e.g. unknown tag, not a git
        # checkout) went unnoticed and an empty/truncated artifact was uploaded.
        if proc.returncode != 0:
            LOG.error(
                "git archive exited with code %d for tag %s", proc.returncode, tag
            )
            sys.exit(1)
        setup_environment(
            config_path=kwargs["env_file"], role=kwargs["aws_assume_role_arn"]
        )
        try:
            s3_client = boto3.client("s3")
            with open(archive_path, "rb") as archive_descriptor:
                s3_client.upload_fileobj(
                    archive_descriptor,
                    bucket,
                    osp.join(module_name, release_archive),
                    ExtraArgs={"ACL": "bucket-owner-full-control"},
                )
            LOG.info(
                "Published artifact to s3://%s/%s",
                bucket,
                osp.join(module_name, release_archive),
            )
        except ClientError as err:
            LOG.error(err)
            # Best effort: log the AWS caller identity to aid debugging the
            # failed upload; never mask the original error.
            try:
                sts_client = boto3.client("sts")
                LOG.error("AWS caller: %s", sts_client.get_caller_identity()["Arn"])
            except ClientError:
                LOG.warning(
                    "Failed to get AWS caller. Probably the client is not authenticated."
                )
            sys.exit(1)
def main(debug, terraform_version, bin_dir):
    """
    Download and install the Terraform binary locally.

    :param debug: Enable verbose logging when True.
    :param terraform_version: Version of Terraform to install.
    :param bin_dir: Directory where the terraform binary is placed.
    """
    # Configure logging first so install_terraform output is captured.
    setup_logging(LOG, debug=debug)
    install_terraform(terraform_version, bindir=bin_dir)
def terraform_cd(**kwargs):
    """
    Publish Terraform module in S3 bucket.

    Builds a ``<module_name>-<module_version>.tar.gz`` archive — either from
    the CI working directory (``include_artifacts``) or via ``git archive`` —
    and publishes it to the configured target.

    Expected kwargs:
        module_name: Name of the Terraform module; used as archive prefix
            and as key/path prefix at the target.
        module_version: Git tag to archive and publish.
        target: Either ``"s3"`` or ``"local"``.
        target_location: S3 bucket name or local destination path,
            depending on ``target``.
        include_artifacts: If True, tar up the current working directory
            instead of running ``git archive``.
        env_file: Path to the environment config (S3 target only).
        aws_assume_role_arn: IAM role to assume (S3 target only).
        debug: Enable verbose logging when True.

    Raises SystemExit(1) if the archive command fails.
    """
    module_name = kwargs["module_name"]
    tag = kwargs["module_version"]
    release_archive = "{project}-{tag}.tar.gz".format(project=module_name, tag=tag)
    target_location = kwargs["target_location"]
    setup_logging(LOG, debug=kwargs["debug"])
    with TemporaryDirectory() as tmp_dir:
        release_archive_full_path = osp.join(tmp_dir, release_archive)
        module_directory_name = "{project}-{tag}".format(project=module_name, tag=tag)
        # generate archive using CI/CDs working directory
        if kwargs["include_artifacts"]:
            LOG.debug("Storing archive under %s", release_archive_full_path)
            # Symlink so tar sees the working dir under the versioned name;
            # -h dereferences the link when archiving.
            symlink(getcwd(), osp.join(tmp_dir, module_directory_name))
            proc = Popen([
                "tar",
                "--directory={tmp}".format(tmp=tmp_dir),
                "--exclude-vcs",
                "--exclude=\\.env*",
                "--owner=0",
                "--group=0",
                "-chzf",
                release_archive_full_path,
                module_directory_name,
            ])
            LOG.debug("Running %s", proc.args)
            proc.communicate()
        # generate archive using git archive
        else:
            with open(release_archive_full_path, "wb") as archive_descriptor:
                proc = Popen(
                    [
                        "git",
                        "archive",
                        "--format=tar.gz",
                        "--prefix={project}-{tag}/".format(project=module_name, tag=tag),
                        tag,
                    ],
                    stdout=archive_descriptor,
                )
                LOG.debug("Running %s", proc.args)
                proc.communicate()
        # Fix: archive tool failures (bad tag, tar error) were previously
        # ignored, publishing an empty/truncated artifact. Abort instead.
        if proc.returncode != 0:
            LOG.error("Archive command exited with code %d", proc.returncode)
            raise SystemExit(1)
        if kwargs["target"] == "s3":
            # sync tar.gz to s3
            setup_environment(
                config_path=kwargs["env_file"], role=kwargs["aws_assume_role_arn"]
            )
            send_to_s3(
                bucket=target_location,
                local_file=release_archive_full_path,
                target_file=osp.join(module_name, release_archive),
            )
        elif kwargs["target"] == "local":
            copy(release_archive_full_path, target_location)
# Fix: the module referenced logging.getLogger() without importing logging,
# which raises NameError on import. Imports regrouped per PEP 8
# (stdlib / third-party / local).
import logging
import stat
from os import path as osp
from pprint import pformat

import boto3
import pytest

from terraform_ci import setup_environment, setup_logging

# "114198773012" is our test account
TEST_ACCOUNT = "114198773012"

LOG = logging.getLogger(__name__)

# setup terraform environment
setup_environment()
setup_logging(LOG, debug=True)

# make sure tests run under our test account
assert boto3.client("sts").get_caller_identity().get("Account") == TEST_ACCOUNT


@pytest.fixture(scope="session")
def ec2_client():
    """
    Session-scoped EC2 client for us-east-2.

    Asserts exactly one VPC exists in the region before handing the client
    to tests, to detect a concurrently running CI job.
    """
    ec2 = boto3.client("ec2", region_name="us-east-2")
    response = ec2.describe_vpcs()
    # NOTE(review): the two message literals concatenate without a separator
    # ("...: %sCheck if..."); kept as-is to preserve runtime output.
    assert len(response["Vpcs"]) == 1, (
        "More than one VPC exists: %s"
        "Check if Travis-CI is running another test https://travis-ci.com/revenants-cie/"
        % pformat(response, indent=4))
    return ec2
def terraform_ci(**kwargs):
    """
    Run Terraform action.

    The tool prepares environment, sets environment variables for
    API keys, passwords, roles etc. It then runs a terraform action
    which may be either plan or apply.

    ci-runner can be called in a CI environment or locally on
    a workstation.

    Expected kwargs: ``debug``, ``modules_path``, ``module_name``,
    ``env_file``, ``aws_assume_role_arn``, ``action``.

    Exits with EX_SOFTWARE if the terraform run fails.
    """
    debug = kwargs["debug"]
    modules_path = kwargs["modules_path"]
    module_name = kwargs["module_name"]
    env_file = kwargs["env_file"]
    aws_assume_role_arn = kwargs["aws_assume_role_arn"]
    action = kwargs["action"]
    setup_logging(LOG, debug=debug)
    # Travis sets TRAVIS_PULL_REQUEST to "false" for branch builds and to
    # the PR number otherwise; absence of the variable means "not a PR".
    try:
        pull_request = not environ["TRAVIS_PULL_REQUEST"] == "false"
    except KeyError:
        pull_request = False
    # A missing env file is tolerated: credentials may already be present
    # in the environment.
    try:
        setup_environment(env_file, role=aws_assume_role_arn)
    except FileNotFoundError:
        # NOTE(review): "doesn't exit" in the log message looks like a typo
        # for "doesn't exist" — runtime string, left untouched here.
        LOG.warning("Environment file %s doesn't exit", env_file)

    # module name is parent directory
    mod = module_name or module_name_from_path(modules_path)
    LOG.info("Processing module %s", mod)

    # NOTE(review): osp.join() with a single argument is a no-op; presumably
    # this was meant to join modules_path with the module name — confirm.
    status = {mod: run_job(osp.join(modules_path), action)}
    outputs = terraform_output(osp.join(modules_path))
    # Propagate a github_token output (if the module defines one) so the
    # PR-comment step below and any nested terraform runs can authenticate.
    if "github_token" in outputs:
        LOG.info(
            "Setting GITHUB_TOKEN and TF_VAR_github_token environment variable from module outputs."
        )
        environ["GITHUB_TOKEN"] = outputs["github_token"]["value"]
        environ["TF_VAR_github_token"] = outputs["github_token"]["value"]

    if status[mod]["success"]:
        LOG.info("%s success: %s", mod, status[mod]["success"])
    else:
        # Dump terraform's captured output before bailing out.
        LOG.error("Failed to process %s", mod)
        LOG.error("STDOUT: %s", status[mod]["stdout"].decode("utf-8"))
        LOG.error("STDERR: %s", status[mod]["stderr"].decode("utf-8"))
        sys.exit(EX_SOFTWARE)

    if pull_request:
        # Replace stale status comments on the PR, then post the fresh one.
        delete_outdated_comments(
            status, environ["TRAVIS_REPO_SLUG"], int(environ["TRAVIS_PULL_REQUEST"])
        )
        post_comment(comment=render_comment(status))
    else:
        # Branch build: print terraform output to the console instead.
        LOG.info("Standard output:")
        sys.stdout.write(
            convert_to_newlines(status[mod]["stdout"]) or "no output\n")
        LOG.info("Standard error output:")
        sys.stderr.write(
            convert_to_newlines(status[mod]["stderr"]) or "no output\n")