Example 1
    def parameterize(self, call, host):
        """
        Parameterize a Call with its Context set to a per-host Config.
        """
        debug("Parameterizing {!r} for host {!r}".format(call, host))
        # Generate a custom ConnectionCall that knows how to yield a Connection
        # in its make_context(), specifically one to the host requested here.
        clone = call.clone(into=ConnectionCall)
        # TODO: using bag-of-attrs is mildly gross but whatever, I'll take it.
        clone.host = host
        return clone
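A hedged sketch of how this might be called, assuming an executor object exposing the method and an existing Call instance; executor and task_call are made-up names, not from the snippet:

clone = executor.parameterize(task_call, "web1.example.com")  # hypothetical objects
assert isinstance(clone, ConnectionCall)
assert clone.host == "web1.example.com"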
Example 2
def init(c, project, quadrant):
    """
    Before terraform can run we need to initialize it.
    The init process sets up the backend for state management and ensures we don't collide quadrants.
    """
    p_log("Task: Init")
    reform_root = settings.GetReformRoot()

    # TODO build this dynamically
    if project not in projects:
        debug("Init: Not a valid project: '%s'" % (project))
        p_log("Init: Not a valid project: '%s'" % (project))
        exit(1)

    project_path = "%s/projects/%s" % (reform_root, project)
    project_tf = Path(project_path)
    if not project_tf.is_dir():
        debug("Init: Project path does not exists: '%s'" % (project_path))
        p_log("Init: Project path does not exists: '%s'" % (project_path))
        exit(2)

    # Run pre task
    clean(c, project)
    preform(c, quadrant)

    _cmd = "%s init " % (tf_bin)
    with c.cd(project_path):
        _fmt_ = c.run("%s fmt" % (tf_bin)).stdout.strip()
        debug("Init: '%s fmt' output '%s'" % (tf_bin, _fmt_))
        _init_ = c.run(_cmd).stdout.strip()
        debug("Init: %s output '%s'" % (_cmd, _init_))
Example 3
    def parameterize(self, call, connection_init_kwargs):
        """
        Parameterize a Call with its Context set to a per-host Connection.

        :param call:
            The generic `.Call` being parameterized.
        :param connection_init_kwargs:
            The dict of `.Connection` init params/kwargs to attach to the
            resulting `.ConnectionCall`.

        :returns:
            `.ConnectionCall`.
        """
        msg = "Parameterizing {!r} with Connection kwargs {!r}"
        debug(msg.format(call, connection_init_kwargs))
        # Generate a custom ConnectionCall that has init_kwargs (used for
        # creating the Connection at runtime) set to the requested params.
        new_call_kwargs = dict(init_kwargs=connection_init_kwargs)
        clone = call.clone(into=ConnectionCall, with_=new_call_kwargs)
        return clone
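The same kind of sketch for this kwargs-based variant (executor and task_call remain made-up names):

kwargs = {"host": "web1.example.com", "user": "deploy"}  # hypothetical values
clone = executor.parameterize(task_call, kwargs)
assert clone.init_kwargs == kwargs  # used to build the Connection at runtime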
Example 4
def mkS3Bucket(c, bucket, region):
    """
    Create the secure S3 bucket we will store our secret keys in.
    This uses the KMS key with alias aws/s3 to encrypt the contents of the
    S3 bucket.
    """
    p_log("Task: mkS3Bucket")
    # No KMS lookup needed here: omitting KMSMasterKeyID below makes S3 fall
    # back to the default KMS key with alias aws/s3.
    bucket_constraint = {}
    if region == "us-east-1":
        s3c = boto3.client("s3")
    else:
        s3c = boto3.client("s3", region_name=region)
        bucket_constraint = {"LocationConstraint": region}

    p_log("Region: %s" % (region))
    create_args = {"ACL": "private", "Bucket": bucket}
    if region != "us-east-1":
        create_args["CreateBucketConfiguration"] = bucket_constraint

    response = s3c.create_bucket(**create_args)
    response = s3c.put_bucket_versioning(
        Bucket=bucket, VersioningConfiguration={"Status": "Enabled"}
    )
    debug("mkS3Bucket: {}".format(response))
    response = s3c.put_bucket_encryption(
        Bucket=bucket,
        ServerSideEncryptionConfiguration={
            "Rules": [
                {
                    "ApplyServerSideEncryptionByDefault": {
                        "SSEAlgorithm": "aws:kms",
                    }
                }
            ]
        },
    )
    debug("mkS3Bucket secure: {}".format(response))
    p_log("%s created in %s" % (bucket, region))
Example 5
def plan(c, project, quadrant):
    """
    This does a standard terraform plan in the project specified.
    It also requires a quadrant to specify what to propose changes for.
    """
    p_log("Start: Plan")
    reform_root = settings.GetReformRoot()

    # TODO build this dynamically
    if project not in projects:
        debug("Plan: Not a valid project: '%s'" % (project))
        p_log("Plan: Not a valid project: '%s'" % (project))
        exit(1)

    project_path = "%s/projects/%s" % (reform_root, project)
    project_tf = Path(project_path)
    if not project_tf.is_dir():
        debug("Plan: Project path does not exists: '%s'" % (project_path))
        p_log("Plan: Project path does not exists: '%s'" % (project_path))
        exit(2)

    # Run pre task
    init(c, project, quadrant)
    pl = os.getenv("TF_PARALLEL", 10)
    _cmd = "%s plan -out=tfplan -parallelism=%s" % (tf_bin, pl)

    with c.cd(project_path):
        _init_ = c.run(_cmd).stdout.strip()
        debug("Plan: %s output '%s'" % (_cmd, _init_))

    p_log("Complete: Plan")
Example 6
def apply(c, project, quadrant):
    """
    This applies a set of changes to terraform.
    It will run a plan first if a tfplan file is not found.
    """
    p_log("Start: Apply")
    reform_root = settings.GetReformRoot()

    # TODO build this dynamically
    if project not in projects:
        debug("Apply: Not a valid project: '%s'" % (project))
        p_log("Apply: Not a valid project: '%s'" % (project))
        exit(1)

    project_path = "%s/projects/%s" % (reform_root, project)
    project_tf = Path(project_path)
    if not project_tf.is_dir():
        debug("Apply: Project path does not exists: '%s'" % (project_path))
        p_log("Apply: Project path does not exists: '%s'" % (project_path))
        exit(2)

    # Run plan if no tfplan exists
    project_tfplan = "%s/tfplan" % (project_path)
    project_tfplan_path = Path(project_tfplan)
    if not project_tfplan_path.is_file():
        plan(c, project, quadrant)
        debug("Apply: produce a plan")

    pl = os.getenv("TF_PARALLEL", 10)
    _cmd = "%s apply -parallelism=%s %s" % (tf_bin, pl, project_tfplan)

    with c.cd(project_path):
        _init_ = c.run(_cmd).stdout.strip()
        debug("Apply: %s output '%s'" % (_cmd, _init_))

    p_log("Complete: Apply")
Example 7
    def put(self, local, remote=None, preserve_mode=True):
        """
        Upload a file from the local filesystem to the current connection.

        :param local:
            Local path of file to upload, or a file-like object.

            **If a string is given**, it should be a path to a local (regular)
            file (not a directory).

            .. note::
                When dealing with nonexistent file paths, normal Python file
                handling concerns come into play - for example, trying to
                upload a nonexistent ``local`` path will typically result in an
                `OSError`.

            **If a file-like object is given**, its contents are written to the
            remote file path.

        :param str remote:
            Remote path to which the local file will be written; is subject to
            similar behavior as that seen by common Unix utilities or OpenSSH's
            ``sftp`` or ``scp`` tools.

            If ``None`` or another 'falsey'/empty value is given (the default),
            the remote current working directory (typically the connecting
            user's home directory) is assumed.

            .. note::
                When ``local`` is a file-like object, ``remote`` is required
                and must refer to a valid file path (not a directory).

        :param bool preserve_mode:
            Whether to ``chmod`` the remote file so it matches the local file's
            mode (default: ``True``).

        :returns: A `.Result` object.

        .. versionadded:: 2.0
        """
        # TODO: preserve honoring of  "name" attribute of file-like objects as
        # in v1, so one CAN just upload to a directory? did we just make that
        # shit up or is it an actual part of the api in newer Pythons?
        sftp = self.connection.sftp()

        if not local:
            raise ValueError("Local path must not be empty!")

        is_file_like = hasattr(local, 'write') and callable(local.write)

        # Massage remote path
        orig_remote = remote
        if not remote:
            if is_file_like:
                raise ValueError("Must give non-empty remote path when local is a file-like object!") # noqa
            else:
                remote = os.path.basename(local)
                debug("Massaged empty remote path into {!r}".format(remote))
        prejoined_remote = remote
        remote = posixpath.join(sftp.getcwd() or sftp.normalize('.'), remote)
        if remote != prejoined_remote:
            msg = "Massaged relative remote path {!r} into {!r}"
            debug(msg.format(prejoined_remote, remote))

        # Massage local path
        orig_local = local
        if not is_file_like:
            local = os.path.abspath(local)
            if local != orig_local:
                debug("Massaged relative local path {!r} into {!r}".format(orig_local, local)) # noqa

        # Run Paramiko-level .put() (side-effects only. womp.)
        # TODO: push some of the path handling into Paramiko; it should be
        # responsible for dealing with path cleaning etc.
        # TODO: probably preserve warning message from v1 when overwriting
        # existing files. Use logging for that obviously.
        #
        # If local appears to be a file-like object, use sftp.putfo, not put
        if is_file_like:
            msg = "Uploading file-like object {!r} to {!r}"
            debug(msg.format(local, remote))
            pointer = local.tell()
            try:
                local.seek(0)
                sftp.putfo(fl=local, remotepath=remote)
            finally:
                local.seek(pointer)
        else:
            debug("Uploading {!r} to {!r}".format(local, remote))
            sftp.put(localpath=local, remotepath=remote)
            # Set mode to same as local end
            # TODO: Push this down into SFTPClient sometime (requires backwards
            # incompat release.)
            #
            if preserve_mode:
                local_mode = os.stat(local).st_mode
                mode = stat.S_IMODE(local_mode)
                sftp.chmod(remote, mode)
        # Return something useful
        return Result(
            orig_remote=orig_remote,
            remote=remote,
            orig_local=orig_local,
            local=local,
            connection=self.connection,
        )
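A short usage sketch for this method as reached through fabric's Connection.put (host and file names are hypothetical; a reachable SSH server is assumed):

from io import StringIO
from fabric import Connection

cn = Connection("web1.example.com")  # hypothetical host
result = cn.put("archive.tgz")  # empty remote: lands in the remote home directory
cn.put(StringIO("hello\n"), remote="hello.txt")  # file-like objects need an explicit remote
print("{0.local} -> {0.remote}".format(result))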
Example 8
    def put(self, local, remote=None, preserve_mode=True):
        """
        Upload a file from the local filesystem to the current connection.

        :param local:
            Local path of file to upload, or a file-like object.

            **If a string is given**, it should be a path to a local (regular)
            file (not a directory).

            .. note::
                When dealing with nonexistent file paths, normal Python file
                handling concerns come into play - for example, trying to
                upload a nonexistent ``local`` path will typically result in an
                `OSError`.

            **If a file-like object is given**, its contents are written to the
            remote file path.

        :param str remote:
            Remote path to which the local file will be written.

            .. note::
                Most SFTP servers set the remote working directory to the
                connecting user's home directory, and (unlike most shells) do
                *not* expand tildes (``~``).

                For example, instead of saying ``put("archive.tgz",
                "~/tmp/")``, say ``put("archive.tgz", "tmp/")``.

                In addition, this means that 'falsey'/empty values (such as the
                default value, ``None``) are allowed and result in uploading to
                the remote home directory.

            .. note::
                When ``local`` is a file-like object, ``remote`` is required
                and must refer to a valid file path (not a directory).

        :param bool preserve_mode:
            Whether to ``chmod`` the remote file so it matches the local file's
            mode (default: ``True``).

        :returns: A `.Result` object.

        .. versionadded:: 2.0
        """
        # TODO: preserve honoring of  "name" attribute of file-like objects as
        # in v1, so one CAN just upload to a directory? did we just make that
        # shit up or is it an actual part of the api in newer Pythons?
        sftp = self.connection.sftp()

        if not local:
            raise ValueError("Local path must not be empty!")

        is_file_like = hasattr(local, "write") and callable(local.write)

        # Massage remote path
        orig_remote = remote
        if not remote:
            if is_file_like:
                raise ValueError(
                    "Must give non-empty remote path when local is a file-like object!"  # noqa
                )
            else:
                remote = os.path.basename(local)
                debug("Massaged empty remote path into {!r}".format(remote))
        prejoined_remote = remote
        remote = posixpath.join(sftp.getcwd() or sftp.normalize("."), remote)
        if remote != prejoined_remote:
            msg = "Massaged relative remote path {!r} into {!r}"
            debug(msg.format(prejoined_remote, remote))

        # Massage local path
        orig_local = local
        if not is_file_like:
            local = os.path.abspath(local)
            if local != orig_local:
                debug(
                    "Massaged relative local path {!r} into {!r}".format(
                        orig_local, local
                    )
                )  # noqa

        # Run Paramiko-level .put() (side-effects only. womp.)
        # TODO: push some of the path handling into Paramiko; it should be
        # responsible for dealing with path cleaning etc.
        # TODO: probably preserve warning message from v1 when overwriting
        # existing files. Use logging for that obviously.
        #
        # If local appears to be a file-like object, use sftp.putfo, not put
        if is_file_like:
            msg = "Uploading file-like object {!r} to {!r}"
            debug(msg.format(local, remote))
            pointer = local.tell()
            try:
                local.seek(0)
                sftp.putfo(fl=local, remotepath=remote)
            finally:
                local.seek(pointer)
        else:
            debug("Uploading {!r} to {!r}".format(local, remote))
            sftp.put(localpath=local, remotepath=remote)
            # Set mode to same as local end
            # TODO: Push this down into SFTPClient sometime (requires backwards
            # incompat release.)
            #
            if preserve_mode:
                local_mode = os.stat(local).st_mode
                mode = stat.S_IMODE(local_mode)
                sftp.chmod(remote, mode)
        # Return something useful
        return Result(
            orig_remote=orig_remote,
            remote=remote,
            orig_local=orig_local,
            local=local,
            connection=self.connection,
        )
Example 9
def foo():
    debug("my-sentinel")
Example 10
def preform(c, quadrant):
    """
    A simple preprocessor for terraform that processes *\*.tf.tpl* files.
    This is how we work around terraform's lack of loops and conditionals.

    This is also how we seed our dynamic reform configs for the state backend and the configs we've defined.
    """
    p_log("Start: Preform")
    projects_base_path = settings.GetReformRoot()

    # TODO Open this more to include modules
    work_dir = projects_base_path
    modules_dir = "%s/modules" % (projects_base_path)
    projects_dir = "%s/projects" % (projects_base_path)
    template_suffix = ".tpl"
    env = Environment(loader=FileSystemLoader(work_dir), trim_blocks=True)

    # Custom Jinja Filters
    def is_list(value):
        return isinstance(value, list)

    def is_dict(value):
        return isinstance(value, dict)

    env.filters["is_list"] = is_list
    env.filters["is_dict"] = is_dict
    env.filters["jsonify"] = json.dumps

    config = ConfigManager.ConfigManager({"env": quadrant}).get_merge_configs()
    secret_manager = SecretsManager.SecretsManager(
        {"key": quadrant, "cipher": "RSA_AES"}
    )
    env_secret = secret_manager.getSecretPath(quadrant)
    secrets = secret_manager.decryptSecretFile(env_secret)
    # Handle modules dir
    for directory, subdirectories, files in os.walk(modules_dir):
        for file in files:
            if file.endswith(template_suffix):
                debug("Found template file: %s" % (file))
                full_file_path = os.path.join(directory, file)
                template = env.get_template(full_file_path.replace(work_dir, ""))
                new_full_file_path = re.sub(
                    template_suffix, "", os.path.join(directory, "preform_" + file)
                )

                debug("Generating file: %s" % (new_full_file_path))
                try:
                    with open(new_full_file_path, "w+") as outfile:
                        rendered_template = template.render(
                            config=config,
                            project=os.path.basename(directory),
                            quadrant=quadrant,
                            secrets=secrets,
                        )
                        debug(rendered_template)
                        outfile.write(
                            "##################################################\n"
                        )
                        outfile.write(
                            "# This file was auto-generated by preform, do not edit!\n"
                        )
                        outfile.write("# Instead edit\n")
                        outfile.write("# %s\n" % (full_file_path))
                        outfile.write(
                            "##################################################\n"
                        )
                        outfile.write("\n\n")
                        outfile.write(rendered_template)
                        outfile.write("\n\n")
                except Exception as e:
                    debug("Preform: failed to generate '%s': %s" % (new_full_file_path, e))

    # Handle projects dir
    for directory, subdirectories, files in os.walk(projects_dir):
        for file in files:
            if file.endswith(template_suffix):
                debug("Found template file: %s" % (file))
                full_file_path = os.path.join(directory, file)
                template = env.get_template(full_file_path.replace(work_dir, ""))
                new_full_file_path = re.sub(
                    template_suffix, "", os.path.join(directory, "preform_" + file)
                )

                debug("Generating file: %s" % (new_full_file_path))
                with open(new_full_file_path, "w+") as outfile:
                    rendered_template = template.render(
                        config=config,
                        project=os.path.basename(directory),
                        quadrant=quadrant,
                        secrets=secrets,
                    )
                    debug(rendered_template)
                    outfile.write(
                        "##################################################\n"
                    )
                    outfile.write(
                        "# This file was auto-generated by preform, do not edit!\n"
                    )
                    outfile.write("# Instead edit\n")
                    outfile.write("# %s\n" % (full_file_path))
                    outfile.write(
                        "##################################################\n"
                    )
                    outfile.write("\n\n")
                    outfile.write(rendered_template)
                    outfile.write("\n\n")

    p_log("Complete: Preform")
Example 11
def clean(c, project):
    """
    This cleans up the terraform cache directory and generated reform files from your project.
    You need to do this between quadrants.
    """
    p_log("Task: Clean")
    reform_root = settings.GetReformRoot()
    if project not in projects:
        debug("Clean: Not a valid project: '%s'" % (project))
        p_log("Clean: Not a valid project: '%s'" % (project))
        exit(1)

    project_path = "%s/projects/%s/.terraform" % (reform_root, project)
    project_tf_cache = Path(project_path)
    if not project_tf_cache.is_dir():
        debug("Clean: Project cache path does not exists: '%s'" % (project_path))

    _clean_ = c.run("rm -Rf %s" % (project_path)).stdout.strip()
    debug("Clean Result: %s" % (_clean_))

    old_project_tfplan = "%s/projects/%s/tfplan" % (reform_root, project)
    old_project_tfplan_path = Path(old_project_tfplan)
    if old_project_tfplan_path.is_file():
        os.remove(old_project_tfplan)
        debug("Clean: Removed '%s'" % (old_project_tfplan))

    # We should also clean up any preform files at this step just in case
    # they change and get abandoned
    preform_path = "%s/projects/%s/**/preform_*.tf" % (reform_root, project)
    for filename in glob.iglob(preform_path, recursive=True):
        debug("Clean: removing %s" % (filename))
        os.remove(filename)

    # TODO Remove preform files from modules
    preform_path = "%s/modules/%s/**/preform_*.tf" % (reform_root, project)
    for filename in glob.iglob(preform_path, recursive=True):
        debug("Clean: removing %s" % (filename))
        os.remove(filename)
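For orientation, a hypothetical clean run and what it deletes:

clean(c, "network")  # made-up project name
# removes: projects/network/.terraform/
#          projects/network/tfplan
#          any projects/network/**/preform_*.tf and modules/network/**/preform_*.tf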
Example 12
def foo(ctx):
    debug("my-sentinel")
Example 13
def mkS3Bucket(c, bucket, region):
    """
    Create the secure S3 bucket we will store our secret keys in.
    This uses the KMS key with alias aws/s3 to encrypt the contents of the
    S3 bucket.
    """
    p_log("Task: mkS3Bucket")
    # First let's find our KMS key with alias aws/s3
    client = boto3.client("kms", region)
    bucket_constraint = {}
    if region == 'us-east-1':
        s3c = boto3.client("s3")
    else:
        s3c = boto3.client("s3", region_name=region)
        bucket_constraint = {"LocationConstraint": region}

    response = client.list_aliases(Limit=100)
    more = True
    kms_id = False
    while more:
        more = False
        for a in response["Aliases"]:
            if a["AliasName"] == "alias/aws/s3":
                if "TargetKeyId" not in a:
                    p_log(
                        "Can't create secure bucket '%s' in '%s': no kms alias/aws/s3 set up yet. It is only created after your first secure s3 bucket."
                        % (bucket, region), 'error')
                    break
                kms_id = a["TargetKeyId"]
                break
        if response["Truncated"]:
            more = True
            response = client.list_aliases(Marker=response["NextMarker"],
                                           Limit=100)

    if not kms_id:
        debug("mkS3Bucket: Never found KMS ID for secure bucket creation")
        exit(3)

    p_log("Region: %s" % (region))
    create_args = {"ACL": "private", "Bucket": bucket}
    if region != 'us-east-1':
        create_args['CreateBucketConfiguration'] = bucket_constraint

    response = s3c.create_bucket(**create_args)
    response = s3c.put_bucket_versioning(
        Bucket=bucket, VersioningConfiguration={"Status": "Enabled"})
    debug("mkS3Bucket: {}".format(response))
    response = s3c.put_bucket_encryption(
        Bucket=bucket,
        ServerSideEncryptionConfiguration={
            "Rules": [{
                "ApplyServerSideEncryptionByDefault": {
                    "SSEAlgorithm": "aws:kms",
                    "KMSMasterKeyID": kms_id,
                }
            }]
        },
    )
    debug("mkS3Bucket secure: {}".format(response))
    p_log("%s created in %s" % (bucket, region))